
/*
   Defines a block Jacobi preconditioner.
*/
#include <petsc-private/pcimpl.h>              /*I "petscpc.h" I*/
#include <../src/ksp/pc/impls/bjacobi/bjacobi.h>

static PetscErrorCode PCSetUp_BJacobi_Singleblock(PC,Mat,Mat);
static PetscErrorCode PCSetUp_BJacobi_Multiblock(PC,Mat,Mat);
static PetscErrorCode PCSetUp_BJacobi_Multiproc(PC);

#undef __FUNCT__
#define __FUNCT__ "PCSetUp_BJacobi"
static PetscErrorCode PCSetUp_BJacobi(PC pc)
{
  PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
  Mat            mat  = pc->mat,pmat = pc->pmat;
  PetscErrorCode ierr,(*f)(Mat,Mat*);
  PetscInt       N,M,start,i,sum,end;
  PetscInt       bs,i_start=-1,i_end=-1;
  PetscMPIInt    rank,size;
  const char     *pprefix,*mprefix;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);
  ierr = MatGetLocalSize(pc->pmat,&M,&N);CHKERRQ(ierr);
  ierr = MatGetBlockSize(pc->pmat,&bs);CHKERRQ(ierr);

  if (jac->n > 0 && jac->n < size) {
    ierr = PCSetUp_BJacobi_Multiproc(pc);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  /* --------------------------------------------------------------------------
      Determines the number of blocks assigned to each processor
  -----------------------------------------------------------------------------*/
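  /*
     Example (default layout): with M = 10 local rows, bs = 1, and n_local = 3 blocks,
     the length formula bs*((M/bs)/n_local + (((M/bs) % n_local) > i)) used below
     yields block lengths {4,3,3}: each block gets (M/bs)/n_local rows plus one
     extra row for the first (M/bs) % n_local blocks.
  */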

  /* local block count given */
  if (jac->n_local > 0 && jac->n < 0) {
    ierr = MPI_Allreduce(&jac->n_local,&jac->n,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
    if (jac->l_lens) { /* check that user set these correctly */
      sum = 0;
      for (i=0; i<jac->n_local; i++) {
        if (jac->l_lens[i]/bs*bs != jac->l_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat blocksize doesn't match block Jacobi layout");
        sum += jac->l_lens[i];
      }
      if (sum != M) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local lens set incorrectly");
    } else {
      ierr = PetscMalloc1(jac->n_local,&jac->l_lens);CHKERRQ(ierr);
      for (i=0; i<jac->n_local; i++) jac->l_lens[i] = bs*((M/bs)/jac->n_local + (((M/bs) % jac->n_local) > i));
    }
  } else if (jac->n > 0 && jac->n_local < 0) { /* global block count given */
    /* global blocks given: determine which ones are local */
    if (jac->g_lens) {
      /* check that g_lens has valid entries */
      for (i=0; i<jac->n; i++) {
        if (!jac->g_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Zero block not allowed");
        if (jac->g_lens[i]/bs*bs != jac->g_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat blocksize doesn't match block Jacobi layout");
      }
      if (size == 1) {
        jac->n_local = jac->n;
        ierr         = PetscMalloc1(jac->n_local,&jac->l_lens);CHKERRQ(ierr);
        ierr         = PetscMemcpy(jac->l_lens,jac->g_lens,jac->n_local*sizeof(PetscInt));CHKERRQ(ierr);
        /* check that user set these correctly */
        sum = 0;
        for (i=0; i<jac->n_local; i++) sum += jac->l_lens[i];
        if (sum != M) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Global lens set incorrectly");
      } else {
        ierr = MatGetOwnershipRange(pc->pmat,&start,&end);CHKERRQ(ierr);
        /* loop over blocks determining the first one owned by me */
        sum = 0;
        for (i=0; i<jac->n+1; i++) {
          if (sum == start) { i_start = i; goto start_1;}
          if (i < jac->n) sum += jac->g_lens[i];
        }
        SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Block sizes used in PCBJacobiSetTotalBlocks()\nare not compatible with parallel matrix layout");
start_1:
        for (i=i_start; i<jac->n+1; i++) {
          if (sum == end) { i_end = i; goto end_1; }
          if (i < jac->n) sum += jac->g_lens[i];
        }
        SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Block sizes used in PCBJacobiSetTotalBlocks()\nare not compatible with parallel matrix layout");
end_1:
        jac->n_local = i_end - i_start;
        ierr         = PetscMalloc1(jac->n_local,&jac->l_lens);CHKERRQ(ierr);
        ierr         = PetscMemcpy(jac->l_lens,jac->g_lens+i_start,jac->n_local*sizeof(PetscInt));CHKERRQ(ierr);
      }
    } else { /* no global block lengths given, determine them using the default layout */
      jac->n_local = jac->n/size + ((jac->n % size) > rank);
      ierr         = PetscMalloc1(jac->n_local,&jac->l_lens);CHKERRQ(ierr);
      for (i=0; i<jac->n_local; i++) {
        jac->l_lens[i] = ((M/bs)/jac->n_local + (((M/bs) % jac->n_local) > i))*bs;
        if (!jac->l_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Too many blocks given");
      }
    }
  } else if (jac->n < 0 && jac->n_local < 0) { /* no blocks given */
    jac->n         = size;
    jac->n_local   = 1;
    ierr           = PetscMalloc(sizeof(PetscInt),&jac->l_lens);CHKERRQ(ierr);
    jac->l_lens[0] = M;
  } else { /* jac->n > 0 && jac->n_local > 0 */
    if (!jac->l_lens) {
      ierr = PetscMalloc1(jac->n_local,&jac->l_lens);CHKERRQ(ierr);
      for (i=0; i<jac->n_local; i++) jac->l_lens[i] = bs*((M/bs)/jac->n_local + (((M/bs) % jac->n_local) > i));
    }
  }
  if (jac->n_local < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Number of blocks is less than number of processors");

  /* -------------------------
      Determines mat and pmat
  ---------------------------*/
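  /* f is non-NULL only if the matrix type has composed a MatGetDiagonalBlock() implementation */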
  ierr = PetscObjectQueryFunction((PetscObject)pc->mat,"MatGetDiagonalBlock_C",&f);CHKERRQ(ierr);
  if (!f && size == 1) {
    mat  = pc->mat;
    pmat = pc->pmat;
  } else {
    if (pc->useAmat) {
      /* use block from Amat matrix, not Pmat for local MatMult() */
      ierr = MatGetDiagonalBlock(pc->mat,&mat);CHKERRQ(ierr);
      /* make submatrix have same prefix as entire matrix */
      ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->mat,&mprefix);CHKERRQ(ierr);
      ierr = PetscObjectSetOptionsPrefix((PetscObject)mat,mprefix);CHKERRQ(ierr);
    }
    if (pc->pmat != pc->mat || !pc->useAmat) {
      ierr = MatGetDiagonalBlock(pc->pmat,&pmat);CHKERRQ(ierr);
      /* make submatrix have same prefix as entire matrix */
      ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
      ierr = PetscObjectSetOptionsPrefix((PetscObject)pmat,pprefix);CHKERRQ(ierr);
    } else pmat = mat;
  }

  /* ------
     Setup code depends on the number of blocks
  */
  if (jac->n_local == 1) {
    ierr = PCSetUp_BJacobi_Singleblock(pc,mat,pmat);CHKERRQ(ierr);
  } else {
    ierr = PCSetUp_BJacobi_Multiblock(pc,mat,pmat);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/* Default destroy, if it has never been setup */
#undef __FUNCT__
#define __FUNCT__ "PCDestroy_BJacobi"
static PetscErrorCode PCDestroy_BJacobi(PC pc)
{
  PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscFree(jac->g_lens);CHKERRQ(ierr);
  ierr = PetscFree(jac->l_lens);CHKERRQ(ierr);
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetFromOptions_BJacobi"
static PetscErrorCode PCSetFromOptions_BJacobi(PC pc)
{
  PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
  PetscErrorCode ierr;
  PetscInt       blocks,i;
  PetscBool      flg;

  PetscFunctionBegin;
  ierr = PetscOptionsHead("Block Jacobi options");CHKERRQ(ierr);
  ierr = PetscOptionsInt("-pc_bjacobi_blocks","Total number of blocks","PCBJacobiSetTotalBlocks",jac->n,&blocks,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PCBJacobiSetTotalBlocks(pc,blocks,NULL);CHKERRQ(ierr);
  }
  if (jac->ksp) {
    /* The sub-KSP has already been set up (e.g., by PCSetUp_BJacobi_Singleblock), but KSPSetFromOptions()
     * was not called on it unless this routine had been called before. */
    for (i=0; i<jac->n_local; i++) {
      ierr = KSPSetFromOptions(jac->ksp[i]);CHKERRQ(ierr);
    }
  }
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#include <petscdraw.h>
#undef __FUNCT__
#define __FUNCT__ "PCView_BJacobi"
static PetscErrorCode PCView_BJacobi(PC pc,PetscViewer viewer)
{
  PC_BJacobi           *jac   = (PC_BJacobi*)pc->data;
  PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
  PetscErrorCode       ierr;
  PetscMPIInt          rank;
  PetscInt             i;
  PetscBool            iascii,isstring,isdraw;
  PetscViewer          sviewer;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr);
  if (iascii) {
    if (pc->useAmat) {
      ierr = PetscViewerASCIIPrintf(viewer,"  block Jacobi: using Amat local matrix, number of blocks = %D\n",jac->n);CHKERRQ(ierr);
    }
    ierr = PetscViewerASCIIPrintf(viewer,"  block Jacobi: number of blocks = %D\n",jac->n);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
    if (jac->same_local_solves) {
      ierr = PetscViewerASCIIPrintf(viewer,"  Local solve is same for all blocks, in the following KSP and PC objects:\n");CHKERRQ(ierr);
      if (jac->ksp && !jac->psubcomm) {
        ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
        if (!rank) {
          ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
          ierr = KSPView(jac->ksp[0],sviewer);CHKERRQ(ierr);
          ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
        }
        ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
      } else if (jac->psubcomm && !jac->psubcomm->color) {
        ierr = PetscViewerASCIIGetStdout(mpjac->psubcomm->comm,&sviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
        ierr = KSPView(*(jac->ksp),sviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      }
    } else {
      PetscInt n_global;
      ierr = MPI_Allreduce(&jac->n_local,&n_global,1,MPIU_INT,MPI_MAX,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"  Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] number of local blocks = %D, first local block number = %D\n",
                                                rank,jac->n_local,jac->first_local);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
      for (i=0; i<jac->n_local; i++) {
        ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] local block number %D\n",rank,i);CHKERRQ(ierr);
        ierr = KSPView(jac->ksp[i],sviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIISynchronizedPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
      }
      ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
    }
  } else if (isstring) {
    ierr = PetscViewerStringSPrintf(viewer," blks=%D",jac->n);CHKERRQ(ierr);
    ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
    if (jac->ksp) {ierr = KSPView(jac->ksp[0],sviewer);CHKERRQ(ierr);}
    ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
  } else if (isdraw) {
    PetscDraw draw;
    char      str[25];
    PetscReal x,y,bottom,h;

    ierr   = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr);
    ierr   = PetscDrawGetCurrentPoint(draw,&x,&y);CHKERRQ(ierr);
    ierr   = PetscSNPrintf(str,25,"Number blocks %D",jac->n);CHKERRQ(ierr);
    ierr   = PetscDrawBoxedString(draw,x,y,PETSC_DRAW_RED,PETSC_DRAW_BLACK,str,NULL,&h);CHKERRQ(ierr);
    bottom = y - h;
    ierr   = PetscDrawPushCurrentPoint(draw,x,bottom);CHKERRQ(ierr);
    /* warning: in parallel the communicator on the viewer is different than the one on the ksp */
    if (jac->ksp) {ierr = KSPView(jac->ksp[0],viewer);CHKERRQ(ierr);}
    ierr = PetscDrawPopCurrentPoint(draw);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------------------*/

#undef __FUNCT__
#define __FUNCT__ "PCBJacobiGetSubKSP_BJacobi"
static PetscErrorCode  PCBJacobiGetSubKSP_BJacobi(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
{
  PC_BJacobi *jac = (PC_BJacobi*)pc->data;

  PetscFunctionBegin;
  if (!pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Must call KSPSetUp() or PCSetUp() first");

  if (n_local) *n_local = jac->n_local;
  if (first_local) *first_local = jac->first_local;
  *ksp                   = jac->ksp;
  jac->same_local_solves = PETSC_FALSE;        /* Assume that local solves are now different;
                                                  not necessarily true though!  This flag is
                                                  used only for PCView_BJacobi() */
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCBJacobiSetTotalBlocks_BJacobi"
static PetscErrorCode  PCBJacobiSetTotalBlocks_BJacobi(PC pc,PetscInt blocks,PetscInt *lens)
{
  PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (pc->setupcalled > 0 && jac->n!=blocks) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ORDER,"Cannot alter number of blocks after PCSetUp()/KSPSetUp() has been called");
  jac->n = blocks;
  if (!lens) jac->g_lens = 0;
  else {
    ierr = PetscMalloc1(blocks,&jac->g_lens);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory((PetscObject)pc,blocks*sizeof(PetscInt));CHKERRQ(ierr);
    ierr = PetscMemcpy(jac->g_lens,lens,blocks*sizeof(PetscInt));CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCBJacobiGetTotalBlocks_BJacobi"
static PetscErrorCode  PCBJacobiGetTotalBlocks_BJacobi(PC pc, PetscInt *blocks, const PetscInt *lens[])
{
  PC_BJacobi *jac = (PC_BJacobi*) pc->data;

  PetscFunctionBegin;
  *blocks = jac->n;
  if (lens) *lens = jac->g_lens;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCBJacobiSetLocalBlocks_BJacobi"
static PetscErrorCode  PCBJacobiSetLocalBlocks_BJacobi(PC pc,PetscInt blocks,const PetscInt lens[])
{
  PC_BJacobi     *jac;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  jac = (PC_BJacobi*)pc->data;

  jac->n_local = blocks;
  if (!lens) jac->l_lens = 0;
  else {
    ierr = PetscMalloc1(blocks,&jac->l_lens);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory((PetscObject)pc,blocks*sizeof(PetscInt));CHKERRQ(ierr);
    ierr = PetscMemcpy(jac->l_lens,lens,blocks*sizeof(PetscInt));CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCBJacobiGetLocalBlocks_BJacobi"
static PetscErrorCode  PCBJacobiGetLocalBlocks_BJacobi(PC pc, PetscInt *blocks, const PetscInt *lens[])
{
  PC_BJacobi *jac = (PC_BJacobi*) pc->data;

  PetscFunctionBegin;
  *blocks = jac->n_local;
  if (lens) *lens = jac->l_lens;
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------------------*/

#undef __FUNCT__
#define __FUNCT__ "PCBJacobiGetSubKSP"
/*@C
   PCBJacobiGetSubKSP - Gets the local KSP contexts for all blocks on
   this processor.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  n_local - the number of blocks on this processor, or NULL
.  first_local - the global number of the first block on this processor, or NULL
-  ksp - the array of KSP contexts

   Notes:
   The array of KSP contexts returned by PCBJacobiGetSubKSP() must not be freed.

   Currently for some matrix implementations only 1 block per processor
   is supported.

   You must call KSPSetUp() or PCSetUp() before calling PCBJacobiGetSubKSP().

   Fortran Usage: You must pass in a KSP array that is large enough to contain all the local KSPs.
      You can call PCBJacobiGetSubKSP(pc,nlocal,firstlocal,NULL_OBJECT,ierr) to determine how large the
      KSP array must be.

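   Example Usage (a minimal sketch; ksp is assumed to be the outer solver using PCBJACOBI,
   and the choice of PCILU for the first local block is purely illustrative):
.vb
      KSP      *subksp;
      PetscInt nlocal,first;
      PC       subpc;

      ierr = KSPSetUp(ksp);CHKERRQ(ierr);
      ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
      ierr = PCBJacobiGetSubKSP(pc,&nlocal,&first,&subksp);CHKERRQ(ierr);
      ierr = KSPGetPC(subksp[0],&subpc);CHKERRQ(ierr);
      ierr = PCSetType(subpc,PCILU);CHKERRQ(ierr);
.ve
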
   Level: advanced

.keywords:  block, Jacobi, get, sub, KSP, context

.seealso: PCBJacobiGetSubKSP()
@*/
PetscErrorCode  PCBJacobiGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscUseMethod(pc,"PCBJacobiGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCBJacobiSetTotalBlocks"
/*@
   PCBJacobiSetTotalBlocks - Sets the global number of blocks for the block
   Jacobi preconditioner.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  blocks - the number of blocks
-  lens - [optional] integer array containing the size of each block

   Options Database Key:
.  -pc_bjacobi_blocks <blocks> - Sets the number of global blocks

   Notes:
   Currently only a limited number of blocking configurations are supported.
   All processors sharing the PC must call this routine with the same data.

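   Example Usage (a minimal sketch; the block count 4 is illustrative):
.vb
      ierr = PCSetType(pc,PCBJACOBI);CHKERRQ(ierr);
      ierr = PCBJacobiSetTotalBlocks(pc,4,NULL);CHKERRQ(ierr);   /* 4 blocks with default (equal) sizes */
.ve
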
   Level: intermediate

.keywords:  set, number, Jacobi, global, total, blocks

.seealso: PCSetUseAmat(), PCBJacobiSetLocalBlocks()
@*/
PetscErrorCode  PCBJacobiSetTotalBlocks(PC pc,PetscInt blocks,const PetscInt lens[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (blocks <= 0) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Must have positive blocks");
  ierr = PetscTryMethod(pc,"PCBJacobiSetTotalBlocks_C",(PC,PetscInt,const PetscInt[]),(pc,blocks,lens));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCBJacobiGetTotalBlocks"
/*@C
   PCBJacobiGetTotalBlocks - Gets the global number of blocks for the block
   Jacobi preconditioner.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output parameters:
+  blocks - the number of blocks
-  lens - integer array containing the size of each block

   Level: intermediate

.keywords:  get, number, Jacobi, global, total, blocks

.seealso: PCSetUseAmat(), PCBJacobiGetLocalBlocks()
@*/
PetscErrorCode  PCBJacobiGetTotalBlocks(PC pc, PetscInt *blocks, const PetscInt *lens[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID,1);
  PetscValidIntPointer(blocks,2);
  ierr = PetscUseMethod(pc,"PCBJacobiGetTotalBlocks_C",(PC,PetscInt*, const PetscInt *[]),(pc,blocks,lens));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCBJacobiSetLocalBlocks"
/*@
   PCBJacobiSetLocalBlocks - Sets the local number of blocks for the block
   Jacobi preconditioner.

   Not Collective

   Input Parameters:
+  pc - the preconditioner context
.  blocks - the number of blocks
-  lens - [optional] integer array containing size of each block

   Note:
   Currently only a limited number of blocking configurations are supported.

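   Example Usage (a minimal sketch; n1 and n2 are hypothetical block sizes whose sum
   must equal the number of local rows):
.vb
      PetscInt lens[2] = {n1,n2};
      ierr = PCBJacobiSetLocalBlocks(pc,2,lens);CHKERRQ(ierr);
.ve
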
   Level: intermediate

.keywords: PC, set, number, Jacobi, local, blocks

.seealso: PCSetUseAmat(), PCBJacobiSetTotalBlocks()
@*/
PetscErrorCode  PCBJacobiSetLocalBlocks(PC pc,PetscInt blocks,const PetscInt lens[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (blocks < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must have nonnegative blocks");
  ierr = PetscTryMethod(pc,"PCBJacobiSetLocalBlocks_C",(PC,PetscInt,const PetscInt []),(pc,blocks,lens));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCBJacobiGetLocalBlocks"
/*@C
   PCBJacobiGetLocalBlocks - Gets the local number of blocks for the block
   Jacobi preconditioner.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  blocks - the number of blocks
-  lens - [optional] integer array containing size of each block

   Note:
   Currently only a limited number of blocking configurations are supported.

   Level: intermediate

.keywords: PC, get, number, Jacobi, local, blocks

.seealso: PCSetUseAmat(), PCBJacobiGetTotalBlocks()
@*/
PetscErrorCode  PCBJacobiGetLocalBlocks(PC pc, PetscInt *blocks, const PetscInt *lens[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID,1);
  PetscValidIntPointer(blocks,2);
  ierr = PetscUseMethod(pc,"PCBJacobiGetLocalBlocks_C",(PC,PetscInt*, const PetscInt *[]),(pc,blocks,lens));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* -----------------------------------------------------------------------------------*/

/*MC
   PCBJACOBI - Use block Jacobi preconditioning, each block is (approximately) solved with
           its own KSP object.

   Options Database Keys:
.  -pc_use_amat - use Amat to apply block of operator in inner Krylov method

   Notes: Each processor can have one or more blocks, but a block cannot be shared by more
     than one processor. Defaults to one block per processor.

     To set options on the solvers for all blocks, append -sub_ to the KSP and PC
        options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly

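     A typical command line (the executable name ./app and the counts are illustrative):
.vb
      mpiexec -n 4 ./app -pc_type bjacobi -pc_bjacobi_blocks 8 -sub_ksp_type preonly -sub_pc_type ilu
.ve
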
     To set options on the solvers separately for each block, call PCBJacobiGetSubKSP()
         and set the options directly on the resulting KSP objects (you can access their PCs
         with KSPGetPC())

   Level: beginner

   Concepts: block Jacobi

   Developer Notes: This preconditioner does not currently work with CUDA/CUSP for a couple of reasons.
       (1) It creates the work vectors as Seq vectors when they should be CUSP vectors.
       (2) The use of VecPlaceArray() is not handled properly by CUSP (that is, it will not know where
           the ownership of the vector data is, so it may use wrong values); even if it did know the ownership
           it may induce extra copies to and from the GPU. Satish suggests a VecTransplantArray() to handle two
           vectors sharing the same pointer, and handling the CUSP side as well, instead of VecGetArray()/VecPlaceArray().


.seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
           PCASM, PCSetUseAmat(), PCGetUseAmat(), PCBJacobiGetSubKSP(), PCBJacobiSetTotalBlocks(),
           PCBJacobiSetLocalBlocks(), PCSetModifySubmatrices()
M*/

#undef __FUNCT__
#define __FUNCT__ "PCCreate_BJacobi"
PETSC_EXTERN PetscErrorCode PCCreate_BJacobi(PC pc)
{
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  PC_BJacobi     *jac;

  PetscFunctionBegin;
  ierr = PetscNewLog(pc,&jac);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);

  pc->ops->apply           = 0;
  pc->ops->applytranspose  = 0;
  pc->ops->setup           = PCSetUp_BJacobi;
  pc->ops->destroy         = PCDestroy_BJacobi;
  pc->ops->setfromoptions  = PCSetFromOptions_BJacobi;
  pc->ops->view            = PCView_BJacobi;
  pc->ops->applyrichardson = 0;

  pc->data               = (void*)jac;
  jac->n                 = -1;
  jac->n_local           = -1;
  jac->first_local       = rank;
  jac->ksp               = 0;
  jac->same_local_solves = PETSC_TRUE;
  jac->g_lens            = 0;
  jac->l_lens            = 0;
  jac->psubcomm          = 0;

  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiGetSubKSP_C",PCBJacobiGetSubKSP_BJacobi);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiSetTotalBlocks_C",PCBJacobiSetTotalBlocks_BJacobi);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiGetTotalBlocks_C",PCBJacobiGetTotalBlocks_BJacobi);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiSetLocalBlocks_C",PCBJacobiSetLocalBlocks_BJacobi);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiGetLocalBlocks_C",PCBJacobiGetLocalBlocks_BJacobi);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* --------------------------------------------------------------------------------------------*/
/*
        These are for a single block per processor; works for AIJ, BAIJ; Seq and MPI
*/
#undef __FUNCT__
#define __FUNCT__ "PCReset_BJacobi_Singleblock"
PetscErrorCode PCReset_BJacobi_Singleblock(PC pc)
{
  PC_BJacobi             *jac  = (PC_BJacobi*)pc->data;
  PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  ierr = KSPReset(jac->ksp[0]);CHKERRQ(ierr);
  ierr = VecDestroy(&bjac->x);CHKERRQ(ierr);
  ierr = VecDestroy(&bjac->y);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCDestroy_BJacobi_Singleblock"
PetscErrorCode PCDestroy_BJacobi_Singleblock(PC pc)
{
  PC_BJacobi             *jac  = (PC_BJacobi*)pc->data;
  PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  ierr = PCReset_BJacobi_Singleblock(pc);CHKERRQ(ierr);
  ierr = KSPDestroy(&jac->ksp[0]);CHKERRQ(ierr);
  ierr = PetscFree(jac->ksp);CHKERRQ(ierr);
  ierr = PetscFree(jac->l_lens);CHKERRQ(ierr);
  ierr = PetscFree(jac->g_lens);CHKERRQ(ierr);
  ierr = PetscFree(bjac);CHKERRQ(ierr);
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetUpOnBlocks_BJacobi_Singleblock"
PetscErrorCode PCSetUpOnBlocks_BJacobi_Singleblock(PC pc)
{
  PetscErrorCode ierr;
  PC_BJacobi     *jac = (PC_BJacobi*)pc->data;

  PetscFunctionBegin;
  ierr = KSPSetUp(jac->ksp[0]);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApply_BJacobi_Singleblock"
PetscErrorCode PCApply_BJacobi_Singleblock(PC pc,Vec x,Vec y)
{
  PetscErrorCode         ierr;
  PC_BJacobi             *jac  = (PC_BJacobi*)pc->data;
  PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
  PetscScalar            *x_array,*y_array;

  PetscFunctionBegin;
  /*
      The VecPlaceArray() is to avoid having to copy the
    x and y vectors into the bjac->x and bjac->y vectors. The reason for
    the bjac->x vector is that we need a sequential vector
    for the sequential solve.
  */
  ierr = VecGetArray(x,&x_array);CHKERRQ(ierr);
  ierr = VecGetArray(y,&y_array);CHKERRQ(ierr);
  ierr = VecPlaceArray(bjac->x,x_array);CHKERRQ(ierr);
  ierr = VecPlaceArray(bjac->y,y_array);CHKERRQ(ierr);
  /* Since the inner KSP matrix may point directly to the diagonal block of an MPI matrix, the inner
     matrix may change even if the outer KSP/PC has not updated the preconditioner; this will trigger a rebuild
     of the inner preconditioner automatically unless we pass down the outer preconditioner's reuse flag. */
  ierr = KSPSetReusePreconditioner(jac->ksp[0],pc->reusepreconditioner);CHKERRQ(ierr);
  ierr = KSPSolve(jac->ksp[0],bjac->x,bjac->y);CHKERRQ(ierr);
  ierr = VecResetArray(bjac->x);CHKERRQ(ierr);
  ierr = VecResetArray(bjac->y);CHKERRQ(ierr);
  ierr = VecRestoreArray(x,&x_array);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&y_array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApplySymmetricLeft_BJacobi_Singleblock"
PetscErrorCode PCApplySymmetricLeft_BJacobi_Singleblock(PC pc,Vec x,Vec y)
{
  PetscErrorCode         ierr;
  PC_BJacobi             *jac  = (PC_BJacobi*)pc->data;
  PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
  PetscScalar            *x_array,*y_array;
  PC                     subpc;

  PetscFunctionBegin;
  /*
      The VecPlaceArray() is to avoid having to copy the
    x and y vectors into the bjac->x and bjac->y vectors. The reason for
    the bjac->x vector is that we need a sequential vector
    for the sequential solve.
  */
  ierr = VecGetArray(x,&x_array);CHKERRQ(ierr);
  ierr = VecGetArray(y,&y_array);CHKERRQ(ierr);
  ierr = VecPlaceArray(bjac->x,x_array);CHKERRQ(ierr);
  ierr = VecPlaceArray(bjac->y,y_array);CHKERRQ(ierr);
  /* apply the symmetric left portion of the inner PC operator */
  /* note this by-passes the inner KSP and its options completely */
  ierr = KSPGetPC(jac->ksp[0],&subpc);CHKERRQ(ierr);
  ierr = PCApplySymmetricLeft(subpc,bjac->x,bjac->y);CHKERRQ(ierr);
  ierr = VecResetArray(bjac->x);CHKERRQ(ierr);
  ierr = VecResetArray(bjac->y);CHKERRQ(ierr);
  ierr = VecRestoreArray(x,&x_array);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&y_array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApplySymmetricRight_BJacobi_Singleblock"
PetscErrorCode PCApplySymmetricRight_BJacobi_Singleblock(PC pc,Vec x,Vec y)
{
  PetscErrorCode         ierr;
  PC_BJacobi             *jac  = (PC_BJacobi*)pc->data;
  PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
  PetscScalar            *x_array,*y_array;
  PC                     subpc;

  PetscFunctionBegin;
  /*
      The VecPlaceArray() is to avoid having to copy the
    x and y vectors into the bjac->x and bjac->y vectors. The reason for
    the bjac->x vector is that we need a sequential vector
    for the sequential solve.
  */
  ierr = VecGetArray(x,&x_array);CHKERRQ(ierr);
  ierr = VecGetArray(y,&y_array);CHKERRQ(ierr);
  ierr = VecPlaceArray(bjac->x,x_array);CHKERRQ(ierr);
  ierr = VecPlaceArray(bjac->y,y_array);CHKERRQ(ierr);

  /* apply the symmetric right portion of the inner PC operator */
  /* note this by-passes the inner KSP and its options completely */

  ierr = KSPGetPC(jac->ksp[0],&subpc);CHKERRQ(ierr);
  ierr = PCApplySymmetricRight(subpc,bjac->x,bjac->y);CHKERRQ(ierr);

  ierr = VecResetArray(bjac->x);CHKERRQ(ierr);
  ierr = VecResetArray(bjac->y);CHKERRQ(ierr);
  ierr = VecRestoreArray(x,&x_array);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&y_array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApplyTranspose_BJacobi_Singleblock"
PetscErrorCode PCApplyTranspose_BJacobi_Singleblock(PC pc,Vec x,Vec y)
{
  PetscErrorCode         ierr;
  PC_BJacobi             *jac  = (PC_BJacobi*)pc->data;
  PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
  PetscScalar            *x_array,*y_array;

  PetscFunctionBegin;
  /*
      The VecPlaceArray() is to avoid having to copy the
    x and y vectors into the bjac->x and bjac->y vectors. The reason for
    the bjac->x vector is that we need a sequential vector
    for the sequential solve.
  */
  ierr = VecGetArray(x,&x_array);CHKERRQ(ierr);
  ierr = VecGetArray(y,&y_array);CHKERRQ(ierr);
  ierr = VecPlaceArray(bjac->x,x_array);CHKERRQ(ierr);
  ierr = VecPlaceArray(bjac->y,y_array);CHKERRQ(ierr);
  ierr = KSPSolveTranspose(jac->ksp[0],bjac->x,bjac->y);CHKERRQ(ierr);
  ierr = VecResetArray(bjac->x);CHKERRQ(ierr);
  ierr = VecResetArray(bjac->y);CHKERRQ(ierr);
  ierr = VecRestoreArray(x,&x_array);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&y_array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetUp_BJacobi_Singleblock"
static PetscErrorCode PCSetUp_BJacobi_Singleblock(PC pc,Mat mat,Mat pmat)
{
  PC_BJacobi             *jac = (PC_BJacobi*)pc->data;
  PetscErrorCode         ierr;
  PetscInt               m;
  KSP                    ksp;
  PC_BJacobi_Singleblock *bjac;
  PetscBool              wasSetup = PETSC_TRUE;

  PetscFunctionBegin;
  if (!pc->setupcalled) {
    const char *prefix;

    if (!jac->ksp) {
      wasSetup = PETSC_FALSE;

      ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
      ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
      ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)ksp);CHKERRQ(ierr);
      ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
      ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
      ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
      ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
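      /* with the "sub_" prefix appended, the block solver picks up options such as -sub_ksp_type and -sub_pc_type */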

      pc->ops->reset               = PCReset_BJacobi_Singleblock;
      pc->ops->destroy             = PCDestroy_BJacobi_Singleblock;
      pc->ops->apply               = PCApply_BJacobi_Singleblock;
      pc->ops->applysymmetricleft  = PCApplySymmetricLeft_BJacobi_Singleblock;
      pc->ops->applysymmetricright = PCApplySymmetricRight_BJacobi_Singleblock;
      pc->ops->applytranspose      = PCApplyTranspose_BJacobi_Singleblock;
      pc->ops->setuponblocks       = PCSetUpOnBlocks_BJacobi_Singleblock;

      ierr        = PetscMalloc(sizeof(KSP),&jac->ksp);CHKERRQ(ierr);
      jac->ksp[0] = ksp;

      ierr      = PetscNewLog(pc,&bjac);CHKERRQ(ierr);
      jac->data = (void*)bjac;
    } else {
      ksp  = jac->ksp[0];
      bjac = (PC_BJacobi_Singleblock*)jac->data;
    }

    /*
      The reason we need to generate these vectors is to serve
      as the right-hand side and solution vector for the solve on the
      block. We do not need to allocate space for the vectors since
      that is provided via VecPlaceArray() just before the call to
      KSPSolve() on the block.
    */
    ierr = MatGetSize(pmat,&m,&m);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,m,NULL,&bjac->x);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,m,NULL,&bjac->y);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)bjac->x);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)bjac->y);CHKERRQ(ierr);
  } else {
    ksp  = jac->ksp[0];
    bjac = (PC_BJacobi_Singleblock*)jac->data;
  }
  if (pc->useAmat) {
    ierr = KSPSetOperators(ksp,mat,pmat);CHKERRQ(ierr);
  } else {
    ierr = KSPSetOperators(ksp,pmat,pmat);CHKERRQ(ierr);
  }
  if (!wasSetup && pc->setfromoptionscalled) {
    /* If PCSetFromOptions_BJacobi is called later, KSPSetFromOptions will be called at that time. */
    ierr = KSPSetFromOptions(ksp);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/* ---------------------------------------------------------------------------------------------*/
#undef __FUNCT__
#define __FUNCT__ "PCReset_BJacobi_Multiblock"
PetscErrorCode PCReset_BJacobi_Multiblock(PC pc)
{
  PC_BJacobi            *jac  = (PC_BJacobi*)pc->data;
  PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
  PetscErrorCode        ierr;
  PetscInt              i;

  PetscFunctionBegin;
  if (bjac && bjac->pmat) {
    ierr = MatDestroyMatrices(jac->n_local,&bjac->pmat);CHKERRQ(ierr);
    if (pc->useAmat) {
      ierr = MatDestroyMatrices(jac->n_local,&bjac->mat);CHKERRQ(ierr);
    }
  }

  for (i=0; i<jac->n_local; i++) {
    ierr = KSPReset(jac->ksp[i]);CHKERRQ(ierr);
    if (bjac && bjac->x) {
      ierr = VecDestroy(&bjac->x[i]);CHKERRQ(ierr);
      ierr = VecDestroy(&bjac->y[i]);CHKERRQ(ierr);
      ierr = ISDestroy(&bjac->is[i]);CHKERRQ(ierr);
    }
  }
  ierr = PetscFree(jac->l_lens);CHKERRQ(ierr);
  ierr = PetscFree(jac->g_lens);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCDestroy_BJacobi_Multiblock"
PetscErrorCode PCDestroy_BJacobi_Multiblock(PC pc)
{
  PC_BJacobi            *jac  = (PC_BJacobi*)pc->data;
  PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
  PetscErrorCode        ierr;
  PetscInt              i;

  PetscFunctionBegin;
  ierr = PCReset_BJacobi_Multiblock(pc);CHKERRQ(ierr);
  if (bjac) {
    ierr = PetscFree2(bjac->x,bjac->y);CHKERRQ(ierr);
    ierr = PetscFree(bjac->starts);CHKERRQ(ierr);
    ierr = PetscFree(bjac->is);CHKERRQ(ierr);
  }
  ierr = PetscFree(jac->data);CHKERRQ(ierr);
  for (i=0; i<jac->n_local; i++) {
    ierr = KSPDestroy(&jac->ksp[i]);CHKERRQ(ierr);
  }
  ierr = PetscFree(jac->ksp);CHKERRQ(ierr);
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetUpOnBlocks_BJacobi_Multiblock"
PetscErrorCode PCSetUpOnBlocks_BJacobi_Multiblock(PC pc)
{
  PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local = jac->n_local;

  PetscFunctionBegin;
  for (i=0; i<n_local; i++) {
    ierr = KSPSetUp(jac->ksp[i]);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*
      Preconditioner for block Jacobi
*/
#undef __FUNCT__
#define __FUNCT__ "PCApply_BJacobi_Multiblock"
PetscErrorCode PCApply_BJacobi_Multiblock(PC pc,Vec x,Vec y)
{
  PC_BJacobi            *jac = (PC_BJacobi*)pc->data;
  PetscErrorCode        ierr;
  PetscInt              i,n_local = jac->n_local;
  PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
  PetscScalar           *xin,*yin;

  PetscFunctionBegin;
  ierr = VecGetArray(x,&xin);CHKERRQ(ierr);
  ierr = VecGetArray(y,&yin);CHKERRQ(ierr);
  for (i=0; i<n_local; i++) {
    /*
       To avoid copying the subvector from x into a workspace we instead
       make the workspace vector array point to the subpart of the array of
       the global vector.
    */
    ierr = VecPlaceArray(bjac->x[i],xin+bjac->starts[i]);CHKERRQ(ierr);
    ierr = VecPlaceArray(bjac->y[i],yin+bjac->starts[i]);CHKERRQ(ierr);

    ierr = PetscLogEventBegin(PC_ApplyOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr);
    ierr = KSPSolve(jac->ksp[i],bjac->x[i],bjac->y[i]);CHKERRQ(ierr);
    ierr = PetscLogEventEnd(PC_ApplyOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr);

    ierr = VecResetArray(bjac->x[i]);CHKERRQ(ierr);
    ierr = VecResetArray(bjac->y[i]);CHKERRQ(ierr);
  }
  ierr = VecRestoreArray(x,&xin);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&yin);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*
      Preconditioner for block Jacobi
*/
#undef __FUNCT__
#define __FUNCT__ "PCApplyTranspose_BJacobi_Multiblock"
PetscErrorCode PCApplyTranspose_BJacobi_Multiblock(PC pc,Vec x,Vec y)
{
  PC_BJacobi            *jac = (PC_BJacobi*)pc->data;
  PetscErrorCode        ierr;
  PetscInt              i,n_local = jac->n_local;
  PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
  PetscScalar           *xin,*yin;

  PetscFunctionBegin;
  ierr = VecGetArray(x,&xin);CHKERRQ(ierr);
  ierr = VecGetArray(y,&yin);CHKERRQ(ierr);
  for (i=0; i<n_local; i++) {
    /*
       To avoid copying the subvector from x into a workspace we instead
       make the workspace vector array point to the subpart of the array of
       the global vector.
    */
    ierr = VecPlaceArray(bjac->x[i],xin+bjac->starts[i]);CHKERRQ(ierr);
    ierr = VecPlaceArray(bjac->y[i],yin+bjac->starts[i]);CHKERRQ(ierr);

    ierr = PetscLogEventBegin(PC_ApplyTransposeOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr);
    ierr = KSPSolveTranspose(jac->ksp[i],bjac->x[i],bjac->y[i]);CHKERRQ(ierr);
    ierr = PetscLogEventEnd(PC_ApplyTransposeOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr);

    ierr = VecResetArray(bjac->x[i]);CHKERRQ(ierr);
    ierr = VecResetArray(bjac->y[i]);CHKERRQ(ierr);
  }
  ierr = VecRestoreArray(x,&xin);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&yin);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetUp_BJacobi_Multiblock"
static PetscErrorCode PCSetUp_BJacobi_Multiblock(PC pc,Mat mat,Mat pmat)
{
  PC_BJacobi            *jac = (PC_BJacobi*)pc->data;
  PetscErrorCode        ierr;
  PetscInt              m,n_local,N,M,start,i;
  const char            *prefix,*pprefix,*mprefix;
  KSP                   ksp;
  Vec                   x,y;
  PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
  PC                    subpc;
  IS                    is;
  MatReuse              scall;

  PetscFunctionBegin;
  ierr = MatGetLocalSize(pc->pmat,&M,&N);CHKERRQ(ierr);

  n_local = jac->n_local;

  if (pc->useAmat) {
    PetscBool same;
    ierr = PetscObjectTypeCompare((PetscObject)mat,((PetscObject)pmat)->type_name,&same);CHKERRQ(ierr);
    if (!same) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_INCOMP,"Matrices not of same type");
  }

  if (!pc->setupcalled) {
    scall = MAT_INITIAL_MATRIX;

    if (!jac->ksp) {
      pc->ops->reset          = PCReset_BJacobi_Multiblock;
      pc->ops->destroy        = PCDestroy_BJacobi_Multiblock;
      pc->ops->apply          = PCApply_BJacobi_Multiblock;
      pc->ops->applytranspose = PCApplyTranspose_BJacobi_Multiblock;
      pc->ops->setuponblocks  = PCSetUpOnBlocks_BJacobi_Multiblock;

      ierr = PetscNewLog(pc,&bjac);CHKERRQ(ierr);
      ierr = PetscMalloc1(n_local,&jac->ksp);CHKERRQ(ierr);
      ierr = PetscLogObjectMemory((PetscObject)pc,n_local*sizeof(KSP));CHKERRQ(ierr);
      ierr = PetscMalloc2(n_local,&bjac->x,n_local,&bjac->y);CHKERRQ(ierr);
      ierr = PetscMalloc1(n_local,&bjac->starts);CHKERRQ(ierr);
      ierr = PetscLogObjectMemory((PetscObject)pc,2*n_local*sizeof(Vec)+n_local*sizeof(PetscInt));CHKERRQ(ierr);

      jac->data = (void*)bjac;
      ierr      = PetscMalloc1(n_local,&bjac->is);CHKERRQ(ierr);
      ierr      = PetscLogObjectMemory((PetscObject)pc,n_local*sizeof(IS));CHKERRQ(ierr);

      for (i=0; i<n_local; i++) {
        ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
        ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
        ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)ksp);CHKERRQ(ierr);
        ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
        ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
        ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
        ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
        ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);

        jac->ksp[i] = ksp;
      }
    } else {
      bjac = (PC_BJacobi_Multiblock*)jac->data;
    }

    start = 0;
    for (i=0; i<n_local; i++) {
      m = jac->l_lens[i];
      /*
      The reason we need to generate these vectors is to serve
      as the right-hand side and solution vector for the solve on the
      block. We do not need to allocate space for the vectors since
      that is provided via VecPlaceArray() just before the call to
      KSPSolve() on the block.
      */
      ierr = VecCreateSeq(PETSC_COMM_SELF,m,&x);CHKERRQ(ierr);
      ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,m,NULL,&y);CHKERRQ(ierr);
      ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)x);CHKERRQ(ierr);
      ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)y);CHKERRQ(ierr);

      bjac->x[i]      = x;
      bjac->y[i]      = y;
      bjac->starts[i] = start;

      ierr        = ISCreateStride(PETSC_COMM_SELF,m,start,1,&is);CHKERRQ(ierr);
      bjac->is[i] = is;
      ierr        = PetscLogObjectParent((PetscObject)pc,(PetscObject)is);CHKERRQ(ierr);

      start += m;
    }
  } else {
    bjac = (PC_BJacobi_Multiblock*)jac->data;
    /*
       Destroy the blocks from the previous iteration
    */
    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      ierr = MatDestroyMatrices(n_local,&bjac->pmat);CHKERRQ(ierr);
      if (pc->useAmat) {
        ierr = MatDestroyMatrices(n_local,&bjac->mat);CHKERRQ(ierr);
      }
      scall = MAT_INITIAL_MATRIX;
    } else scall = MAT_REUSE_MATRIX;
  }

  ierr = MatGetSubMatrices(pmat,n_local,bjac->is,bjac->is,scall,&bjac->pmat);CHKERRQ(ierr);
  if (pc->useAmat) {
    ierr = PetscObjectGetOptionsPrefix((PetscObject)mat,&mprefix);CHKERRQ(ierr);
    ierr = MatGetSubMatrices(mat,n_local,bjac->is,bjac->is,scall,&bjac->mat);CHKERRQ(ierr);
  }
  /* Return control to the user so that the submatrices can be modified (e.g., to apply
     different boundary conditions for the submatrices than for the global problem) */
  ierr = PCModifySubMatrices(pc,n_local,bjac->is,bjac->is,bjac->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);

  ierr = PetscObjectGetOptionsPrefix((PetscObject)pmat,&pprefix);CHKERRQ(ierr);
  for (i=0; i<n_local; i++) {
    ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)bjac->pmat[i]);CHKERRQ(ierr);
    ierr = PetscObjectSetOptionsPrefix((PetscObject)bjac->pmat[i],pprefix);CHKERRQ(ierr);
    if (pc->useAmat) {
      ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)bjac->mat[i]);CHKERRQ(ierr);
      ierr = PetscObjectSetOptionsPrefix((PetscObject)bjac->mat[i],mprefix);CHKERRQ(ierr);
      ierr = KSPSetOperators(jac->ksp[i],bjac->mat[i],bjac->pmat[i]);CHKERRQ(ierr);
    } else {
      ierr = KSPSetOperators(jac->ksp[i],bjac->pmat[i],bjac->pmat[i]);CHKERRQ(ierr);
    }
    if (pc->setfromoptionscalled) {
      ierr = KSPSetFromOptions(jac->ksp[i]);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}

/* ---------------------------------------------------------------------------------------------*/
/*
      These are for a single block with multiple processes;
*/
#undef __FUNCT__
#define __FUNCT__ "PCReset_BJacobi_Multiproc"
static PetscErrorCode PCReset_BJacobi_Multiproc(PC pc)
{
  PC_BJacobi           *jac   = (PC_BJacobi*)pc->data;
  PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
  PetscErrorCode       ierr;

  PetscFunctionBegin;
  ierr = VecDestroy(&mpjac->ysub);CHKERRQ(ierr);
  ierr = VecDestroy(&mpjac->xsub);CHKERRQ(ierr);
  ierr = MatDestroy(&mpjac->submats);CHKERRQ(ierr);
  if (jac->ksp) {ierr = KSPReset(jac->ksp[0]);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCDestroy_BJacobi_Multiproc"
static PetscErrorCode PCDestroy_BJacobi_Multiproc(PC pc)
{
  PC_BJacobi           *jac   = (PC_BJacobi*)pc->data;
  PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
  PetscErrorCode       ierr;

  PetscFunctionBegin;
  ierr = PCReset_BJacobi_Multiproc(pc);CHKERRQ(ierr);
  ierr = KSPDestroy(&jac->ksp[0]);CHKERRQ(ierr);
  ierr = PetscFree(jac->ksp);CHKERRQ(ierr);
  ierr = PetscSubcommDestroy(&mpjac->psubcomm);CHKERRQ(ierr);

  ierr = PetscFree(mpjac);CHKERRQ(ierr);
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApply_BJacobi_Multiproc"
static PetscErrorCode PCApply_BJacobi_Multiproc(PC pc,Vec x,Vec y)
{
  PC_BJacobi           *jac   = (PC_BJacobi*)pc->data;
  PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
  PetscErrorCode       ierr;
  PetscScalar          *xarray,*yarray;

  PetscFunctionBegin;
  /* place x's and y's local arrays into xsub and ysub */
  ierr = VecGetArray(x,&xarray);CHKERRQ(ierr);
  ierr = VecGetArray(y,&yarray);CHKERRQ(ierr);
  ierr = VecPlaceArray(mpjac->xsub,xarray);CHKERRQ(ierr);
  ierr = VecPlaceArray(mpjac->ysub,yarray);CHKERRQ(ierr);

  /* apply preconditioner on each matrix block */
  ierr = PetscLogEventBegin(PC_ApplyOnMproc,jac->ksp[0],mpjac->xsub,mpjac->ysub,0);CHKERRQ(ierr);
  ierr = KSPSolve(jac->ksp[0],mpjac->xsub,mpjac->ysub);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(PC_ApplyOnMproc,jac->ksp[0],mpjac->xsub,mpjac->ysub,0);CHKERRQ(ierr);

  ierr = VecResetArray(mpjac->xsub);CHKERRQ(ierr);
  ierr = VecResetArray(mpjac->ysub);CHKERRQ(ierr);
  ierr = VecRestoreArray(x,&xarray);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&yarray);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#include <petsc-private/matimpl.h>
#undef __FUNCT__
#define __FUNCT__ "PCSetUp_BJacobi_Multiproc"
static PetscErrorCode PCSetUp_BJacobi_Multiproc(PC pc)
{
  PC_BJacobi           *jac   = (PC_BJacobi*)pc->data;
  PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
  PetscErrorCode       ierr;
  PetscInt             m,n;
  MPI_Comm             comm,subcomm=0;
  const char           *prefix;
  PetscBool            wasSetup = PETSC_TRUE;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
  if (jac->n_local > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Only a single block in a subcommunicator is supported");
  jac->n_local = 1; /* currently only a single block is supported for a subcommunicator */
  if (!pc->setupcalled) {
    wasSetup  = PETSC_FALSE;
    ierr      = PetscNewLog(pc,&mpjac);CHKERRQ(ierr);
    jac->data = (void*)mpjac;

    /* initialize datastructure mpjac */
    if (!jac->psubcomm) {
      /* Create default contiguous subcommunicators if the user does not provide them */
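      /* e.g., with 8 MPI ranks and jac->n = 2, ranks 0-3 and 4-7 form the two subcommunicators */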
      ierr = PetscSubcommCreate(comm,&jac->psubcomm);CHKERRQ(ierr);
      ierr = PetscSubcommSetNumber(jac->psubcomm,jac->n);CHKERRQ(ierr);
      ierr = PetscSubcommSetType(jac->psubcomm,PETSC_SUBCOMM_CONTIGUOUS);CHKERRQ(ierr);
      ierr = PetscLogObjectMemory((PetscObject)pc,sizeof(PetscSubcomm));CHKERRQ(ierr);
    }
    mpjac->psubcomm = jac->psubcomm;
    subcomm         = mpjac->psubcomm->comm;

    /* Get matrix blocks of pmat */
    if (!pc->pmat->ops->getmultiprocblock) SETERRQ(PetscObjectComm((PetscObject)pc->pmat),PETSC_ERR_SUP,"No support for the requested operation");
    ierr = (*pc->pmat->ops->getmultiprocblock)(pc->pmat,subcomm,MAT_INITIAL_MATRIX,&mpjac->submats);CHKERRQ(ierr);
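    /* mpjac->submats is the block of pmat owned by this subcommunicator; each subcommunicator solves its block independently */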

    /* create a new PC that processors in each subcomm have copy of */
    ierr = PetscMalloc(sizeof(KSP),&jac->ksp);CHKERRQ(ierr);
    ierr = KSPCreate(subcomm,&jac->ksp[0]);CHKERRQ(ierr);
    ierr = PetscObjectIncrementTabLevel((PetscObject)jac->ksp[0],(PetscObject)pc,1);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)jac->ksp[0]);CHKERRQ(ierr);
    ierr = KSPSetOperators(jac->ksp[0],mpjac->submats,mpjac->submats);CHKERRQ(ierr);
    ierr = KSPGetPC(jac->ksp[0],&mpjac->pc);CHKERRQ(ierr);

    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    ierr = KSPSetOptionsPrefix(jac->ksp[0],prefix);CHKERRQ(ierr);
    ierr = KSPAppendOptionsPrefix(jac->ksp[0],"sub_");CHKERRQ(ierr);
    /*
      PetscMPIInt rank,subsize,subrank;
      ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
      ierr = MPI_Comm_size(subcomm,&subsize);CHKERRQ(ierr);
      ierr = MPI_Comm_rank(subcomm,&subrank);CHKERRQ(ierr);

      ierr = MatGetLocalSize(mpjac->submats,&m,NULL);CHKERRQ(ierr);
      ierr = MatGetSize(mpjac->submats,&n,NULL);CHKERRQ(ierr);
      ierr = PetscSynchronizedPrintf(comm,"[%d], sub-size %d,sub-rank %d\n",rank,subsize,subrank);
      ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
    */

    /* create dummy vectors xsub and ysub */
    ierr = MatGetLocalSize(mpjac->submats,&m,&n);CHKERRQ(ierr);
    ierr = VecCreateMPIWithArray(subcomm,1,n,PETSC_DECIDE,NULL,&mpjac->xsub);CHKERRQ(ierr);
    ierr = VecCreateMPIWithArray(subcomm,1,m,PETSC_DECIDE,NULL,&mpjac->ysub);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)mpjac->xsub);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)mpjac->ysub);CHKERRQ(ierr);

    pc->ops->reset   = PCReset_BJacobi_Multiproc;
    pc->ops->destroy = PCDestroy_BJacobi_Multiproc;
    pc->ops->apply   = PCApply_BJacobi_Multiproc;
  } else { /* pc->setupcalled */
    subcomm = mpjac->psubcomm->comm;
    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      /* destroy old matrix blocks, then get new matrix blocks */
      if (mpjac->submats) {ierr = MatDestroy(&mpjac->submats);CHKERRQ(ierr);}
      ierr = (*pc->pmat->ops->getmultiprocblock)(pc->pmat,subcomm,MAT_INITIAL_MATRIX,&mpjac->submats);CHKERRQ(ierr);
    } else {
      ierr = (*pc->pmat->ops->getmultiprocblock)(pc->pmat,subcomm,MAT_REUSE_MATRIX,&mpjac->submats);CHKERRQ(ierr);
    }
    ierr = KSPSetOperators(jac->ksp[0],mpjac->submats,mpjac->submats);CHKERRQ(ierr);
  }

  if (!wasSetup && pc->setfromoptionscalled) {
    ierr = KSPSetFromOptions(jac->ksp[0]);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1281