/*
   Defines a block Jacobi preconditioner.
*/

#include <../src/ksp/pc/impls/bjacobi/bjacobi.h> /*I "petscpc.h" I*/

static PetscErrorCode PCSetUp_BJacobi_Singleblock(PC,Mat,Mat);
static PetscErrorCode PCSetUp_BJacobi_Multiblock(PC,Mat,Mat);
static PetscErrorCode PCSetUp_BJacobi_Multiproc(PC);

static PetscErrorCode PCSetUp_BJacobi(PC pc)
{
  PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
  Mat            mat  = pc->mat,pmat = pc->pmat;
  PetscErrorCode ierr;
  PetscBool      hasop;
  PetscInt       N,M,start,i,sum,end;
  PetscInt       bs,i_start=-1,i_end=-1;
  PetscMPIInt    rank,size;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRMPI(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRMPI(ierr);
  ierr = MatGetLocalSize(pc->pmat,&M,&N);CHKERRQ(ierr);
  ierr = MatGetBlockSize(pc->pmat,&bs);CHKERRQ(ierr);

  if (jac->n > 0 && jac->n < size) {
    ierr = PCSetUp_BJacobi_Multiproc(pc);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  /* --------------------------------------------------------------------------
      Determines the number of blocks assigned to each processor
  -----------------------------------------------------------------------------*/
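  /*
     When explicit block lengths are not supplied, the default partitions below give each
     local block either floor((M/bs)/n_local) or floor((M/bs)/n_local)+1 matrix blocks of
     size bs, with the remainder spread over the first blocks. For example (illustrative
     values only): M = 10, bs = 1, n_local = 3 yields local lengths 4, 3, 3.
  */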

  /* local block count given */
  if (jac->n_local > 0 && jac->n < 0) {
    ierr = MPIU_Allreduce(&jac->n_local,&jac->n,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
    if (jac->l_lens) { /* check that user set these correctly */
      sum = 0;
      for (i=0; i<jac->n_local; i++) {
        if (jac->l_lens[i]/bs*bs != jac->l_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat blocksize doesn't match block Jacobi layout");
        sum += jac->l_lens[i];
      }
      if (sum != M) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local lens set incorrectly");
    } else {
      ierr = PetscMalloc1(jac->n_local,&jac->l_lens);CHKERRQ(ierr);
      for (i=0; i<jac->n_local; i++) jac->l_lens[i] = bs*((M/bs)/jac->n_local + (((M/bs) % jac->n_local) > i));
    }
  } else if (jac->n > 0 && jac->n_local < 0) { /* global block count given: determine which blocks are local */
    if (jac->g_lens) {
      /* check that g_lens has valid entries */
      for (i=0; i<jac->n; i++) {
        if (!jac->g_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Zero block not allowed");
        if (jac->g_lens[i]/bs*bs != jac->g_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat blocksize doesn't match block Jacobi layout");
      }
      if (size == 1) {
        jac->n_local = jac->n;
        ierr         = PetscMalloc1(jac->n_local,&jac->l_lens);CHKERRQ(ierr);
        ierr         = PetscArraycpy(jac->l_lens,jac->g_lens,jac->n_local);CHKERRQ(ierr);
        /* check that user set these correctly */
        sum = 0;
        for (i=0; i<jac->n_local; i++) sum += jac->l_lens[i];
        if (sum != M) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Global lens set incorrectly");
      } else {
        ierr = MatGetOwnershipRange(pc->pmat,&start,&end);CHKERRQ(ierr);
        /* loop over blocks determining first one owned by me */
        sum = 0;
        for (i=0; i<jac->n+1; i++) {
          if (sum == start) { i_start = i; goto start_1;}
          if (i < jac->n) sum += jac->g_lens[i];
        }
        SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Block sizes used in PCBJacobiSetTotalBlocks()\nare not compatible with parallel matrix layout");
start_1:
        for (i=i_start; i<jac->n+1; i++) {
          if (sum == end) { i_end = i; goto end_1; }
          if (i < jac->n) sum += jac->g_lens[i];
        }
        SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Block sizes used in PCBJacobiSetTotalBlocks()\nare not compatible with parallel matrix layout");
end_1:
        jac->n_local = i_end - i_start;
        ierr         = PetscMalloc1(jac->n_local,&jac->l_lens);CHKERRQ(ierr);
        ierr         = PetscArraycpy(jac->l_lens,jac->g_lens+i_start,jac->n_local);CHKERRQ(ierr);
      }
    } else { /* no global blocks given, determine them using default layout */
      jac->n_local = jac->n/size + ((jac->n % size) > rank);
      ierr         = PetscMalloc1(jac->n_local,&jac->l_lens);CHKERRQ(ierr);
      for (i=0; i<jac->n_local; i++) {
        jac->l_lens[i] = ((M/bs)/jac->n_local + (((M/bs) % jac->n_local) > i))*bs;
        if (!jac->l_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Too many blocks given");
      }
    }
  } else if (jac->n < 0 && jac->n_local < 0) { /* no blocks given */
    jac->n         = size;
    jac->n_local   = 1;
    ierr           = PetscMalloc1(1,&jac->l_lens);CHKERRQ(ierr);
    jac->l_lens[0] = M;
  } else { /* jac->n > 0 && jac->n_local > 0 */
    if (!jac->l_lens) {
      ierr = PetscMalloc1(jac->n_local,&jac->l_lens);CHKERRQ(ierr);
      for (i=0; i<jac->n_local; i++) jac->l_lens[i] = bs*((M/bs)/jac->n_local + (((M/bs) % jac->n_local) > i));
    }
  }
  if (jac->n_local < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Number of blocks is less than number of processors");

  /* -------------------------
      Determines mat and pmat
  ---------------------------*/
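  /* When the matrix provides no diagonal-block operation and the run is sequential, the
     matrices are used as-is; otherwise the local diagonal block of Amat and/or Pmat is
     extracted, honoring the PCSetUseAmat() setting. */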
  ierr = MatHasOperation(pc->mat,MATOP_GET_DIAGONAL_BLOCK,&hasop);CHKERRQ(ierr);
  if (!hasop && size == 1) {
    mat  = pc->mat;
    pmat = pc->pmat;
  } else {
    if (pc->useAmat) {
      /* use block from Amat matrix, not Pmat, for local MatMult() */
      ierr = MatGetDiagonalBlock(pc->mat,&mat);CHKERRQ(ierr);
    }
    if (pc->pmat != pc->mat || !pc->useAmat) {
      ierr = MatGetDiagonalBlock(pc->pmat,&pmat);CHKERRQ(ierr);
    } else pmat = mat;
  }

  /* ------
     Setup code depends on the number of blocks
  */
  if (jac->n_local == 1) {
    ierr = PCSetUp_BJacobi_Singleblock(pc,mat,pmat);CHKERRQ(ierr);
  } else {
    ierr = PCSetUp_BJacobi_Multiblock(pc,mat,pmat);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/* Default destroy, if it has never been set up */
static PetscErrorCode PCDestroy_BJacobi(PC pc)
{
  PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscFree(jac->g_lens);CHKERRQ(ierr);
  ierr = PetscFree(jac->l_lens);CHKERRQ(ierr);
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCSetFromOptions_BJacobi(PetscOptionItems *PetscOptionsObject,PC pc)
{
  PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
  PetscErrorCode ierr;
  PetscInt       blocks,i;
  PetscBool      flg;

  PetscFunctionBegin;
  ierr = PetscOptionsHead(PetscOptionsObject,"Block Jacobi options");CHKERRQ(ierr);
  ierr = PetscOptionsInt("-pc_bjacobi_blocks","Total number of blocks","PCBJacobiSetTotalBlocks",jac->n,&blocks,&flg);CHKERRQ(ierr);
  if (flg) {ierr = PCBJacobiSetTotalBlocks(pc,blocks,NULL);CHKERRQ(ierr);}
  ierr = PetscOptionsInt("-pc_bjacobi_local_blocks","Local number of blocks","PCBJacobiSetLocalBlocks",jac->n_local,&blocks,&flg);CHKERRQ(ierr);
  if (flg) {ierr = PCBJacobiSetLocalBlocks(pc,blocks,NULL);CHKERRQ(ierr);}
  if (jac->ksp) {
    /* The sub-KSP has already been set up (e.g., PCSetUp_BJacobi_Singleblock), but KSPSetFromOptions was not called
     * unless we had already been called. */
    for (i=0; i<jac->n_local; i++) {
      ierr = KSPSetFromOptions(jac->ksp[i]);CHKERRQ(ierr);
    }
  }
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#include <petscdraw.h>
static PetscErrorCode PCView_BJacobi(PC pc,PetscViewer viewer)
{
  PC_BJacobi           *jac   = (PC_BJacobi*)pc->data;
  PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
  PetscErrorCode       ierr;
  PetscMPIInt          rank;
  PetscInt             i;
  PetscBool            iascii,isstring,isdraw;
  PetscViewer          sviewer;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr);
  if (iascii) {
    if (pc->useAmat) {
      ierr = PetscViewerASCIIPrintf(viewer,"  using Amat local matrix, number of blocks = %D\n",jac->n);CHKERRQ(ierr);
    }
    ierr = PetscViewerASCIIPrintf(viewer,"  number of blocks = %D\n",jac->n);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRMPI(ierr);
    if (jac->same_local_solves) {
      ierr = PetscViewerASCIIPrintf(viewer,"  Local solver is the same for all blocks, as in the following KSP and PC objects on rank 0:\n");CHKERRQ(ierr);
      if (jac->ksp && !jac->psubcomm) {
        ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
        if (!rank) {
          ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
          ierr = KSPView(jac->ksp[0],sviewer);CHKERRQ(ierr);
          ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
        }
        ierr = PetscViewerFlush(sviewer);CHKERRQ(ierr);
        ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
        ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
        /* extra call needed because of the two calls to PetscViewerASCIIPushSynchronized() in PetscViewerGetSubViewer() */
        ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
      } else if (mpjac && jac->ksp && mpjac->psubcomm) {
        ierr = PetscViewerGetSubViewer(viewer,mpjac->psubcomm->child,&sviewer);CHKERRQ(ierr);
        if (!mpjac->psubcomm->color) {
          ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
          ierr = KSPView(*(jac->ksp),sviewer);CHKERRQ(ierr);
          ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
        }
        ierr = PetscViewerFlush(sviewer);CHKERRQ(ierr);
        ierr = PetscViewerRestoreSubViewer(viewer,mpjac->psubcomm->child,&sviewer);CHKERRQ(ierr);
        ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
        /* extra call needed because of the two calls to PetscViewerASCIIPushSynchronized() in PetscViewerGetSubViewer() */
        ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
      } else {
        ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      }
    } else {
      PetscInt n_global;
      ierr = MPIU_Allreduce(&jac->n_local,&n_global,1,MPIU_INT,MPI_MAX,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"  Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] number of local blocks = %D, first local block number = %D\n",
                                                rank,jac->n_local,jac->first_local);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      for (i=0; i<jac->n_local; i++) {
        ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] local block number %D\n",rank,i);CHKERRQ(ierr);
        ierr = KSPView(jac->ksp[i],sviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIISynchronizedPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
      }
      ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    ierr = PetscViewerStringSPrintf(viewer," blks=%D",jac->n);CHKERRQ(ierr);
    ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
    if (jac->ksp) {ierr = KSPView(jac->ksp[0],sviewer);CHKERRQ(ierr);}
    ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
  } else if (isdraw) {
    PetscDraw draw;
    char      str[25];
    PetscReal x,y,bottom,h;

    ierr   = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr);
    ierr   = PetscDrawGetCurrentPoint(draw,&x,&y);CHKERRQ(ierr);
    ierr   = PetscSNPrintf(str,25,"Number blocks %D",jac->n);CHKERRQ(ierr);
    ierr   = PetscDrawStringBoxed(draw,x,y,PETSC_DRAW_RED,PETSC_DRAW_BLACK,str,NULL,&h);CHKERRQ(ierr);
    bottom = y - h;
    ierr   = PetscDrawPushCurrentPoint(draw,x,bottom);CHKERRQ(ierr);
    /* warning: the communicator on the viewer is different than on the KSP in parallel */
    if (jac->ksp) {ierr = KSPView(jac->ksp[0],viewer);CHKERRQ(ierr);}
    ierr = PetscDrawPopCurrentPoint(draw);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------------------*/

static PetscErrorCode  PCBJacobiGetSubKSP_BJacobi(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
{
  PC_BJacobi *jac = (PC_BJacobi*)pc->data;

  PetscFunctionBegin;
  if (!pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Must call KSPSetUp() or PCSetUp() first");

  if (n_local) *n_local = jac->n_local;
  if (first_local) *first_local = jac->first_local;
  *ksp                   = jac->ksp;
  jac->same_local_solves = PETSC_FALSE;        /* Assume that local solves are now different;
                                                  not necessarily true though!  This flag is
                                                  used only for PCView_BJacobi() */
  PetscFunctionReturn(0);
}

static PetscErrorCode  PCBJacobiSetTotalBlocks_BJacobi(PC pc,PetscInt blocks,PetscInt *lens)
{
  PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (pc->setupcalled > 0 && jac->n!=blocks) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ORDER,"Cannot alter number of blocks after PCSetUp()/KSPSetUp() has been called");
  jac->n = blocks;
  if (!lens) jac->g_lens = NULL;
  else {
    ierr = PetscMalloc1(blocks,&jac->g_lens);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory((PetscObject)pc,blocks*sizeof(PetscInt));CHKERRQ(ierr);
    ierr = PetscArraycpy(jac->g_lens,lens,blocks);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode  PCBJacobiGetTotalBlocks_BJacobi(PC pc, PetscInt *blocks, const PetscInt *lens[])
{
  PC_BJacobi *jac = (PC_BJacobi*) pc->data;

  PetscFunctionBegin;
  *blocks = jac->n;
  if (lens) *lens = jac->g_lens;
  PetscFunctionReturn(0);
}

static PetscErrorCode  PCBJacobiSetLocalBlocks_BJacobi(PC pc,PetscInt blocks,const PetscInt lens[])
{
  PC_BJacobi     *jac;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  jac = (PC_BJacobi*)pc->data;

  jac->n_local = blocks;
  if (!lens) jac->l_lens = NULL;
  else {
    ierr = PetscMalloc1(blocks,&jac->l_lens);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory((PetscObject)pc,blocks*sizeof(PetscInt));CHKERRQ(ierr);
    ierr = PetscArraycpy(jac->l_lens,lens,blocks);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode  PCBJacobiGetLocalBlocks_BJacobi(PC pc, PetscInt *blocks, const PetscInt *lens[])
{
  PC_BJacobi *jac = (PC_BJacobi*) pc->data;

  PetscFunctionBegin;
  *blocks = jac->n_local;
  if (lens) *lens = jac->l_lens;
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------------------*/

/*@C
   PCBJacobiGetSubKSP - Gets the local KSP contexts for all blocks on
   this processor.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  n_local - the number of blocks on this processor, or NULL
.  first_local - the global number of the first block on this processor, or NULL
-  ksp - the array of KSP contexts

   Notes:
   The array of KSP contexts returned by PCBJacobiGetSubKSP() must not be freed by the caller.

   Currently for some matrix implementations only 1 block per processor
   is supported.

   You must call KSPSetUp() or PCSetUp() before calling PCBJacobiGetSubKSP().

   Fortran Usage: You must pass in a KSP array that is large enough to contain all the local KSPs.
      You can call PCBJacobiGetSubKSP(pc,nlocal,firstlocal,PETSC_NULL_KSP,ierr) to determine how large the
      KSP array must be.

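   Example:
   A minimal sketch (assuming pc is an already set up PCBJACOBI preconditioner)
   that tightens the relative tolerance of every local block solver:
.vb
   PetscInt n_local,i;
   KSP      *subksp;
   ierr = PCBJacobiGetSubKSP(pc,&n_local,NULL,&subksp);CHKERRQ(ierr);
   for (i=0; i<n_local; i++) {
     ierr = KSPSetTolerances(subksp[i],1.e-8,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT);CHKERRQ(ierr);
   }
.ve
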
   Level: advanced

.seealso: PCASMGetSubKSP()
@*/
PetscErrorCode  PCBJacobiGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscUseMethod(pc,"PCBJacobiGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   PCBJacobiSetTotalBlocks - Sets the global number of blocks for the block
   Jacobi preconditioner.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  blocks - the number of blocks
-  lens - [optional] integer array containing the size of each block

   Options Database Key:
.  -pc_bjacobi_blocks <blocks> - Sets the number of global blocks

   Notes:
   Currently only a limited number of blocking configurations are supported.
   All processors sharing the PC must call this routine with the same data.

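   Example:
   A minimal sketch (assuming a 100 x 100 system) requesting four equally sized blocks:
.vb
   PetscInt lens[4] = {25,25,25,25};
   ierr = PCSetType(pc,PCBJACOBI);CHKERRQ(ierr);
   ierr = PCBJacobiSetTotalBlocks(pc,4,lens);CHKERRQ(ierr);
.ve
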
   Level: intermediate

.seealso: PCSetUseAmat(), PCBJacobiSetLocalBlocks()
@*/
PetscErrorCode  PCBJacobiSetTotalBlocks(PC pc,PetscInt blocks,const PetscInt lens[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (blocks <= 0) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Must have positive blocks");
  ierr = PetscTryMethod(pc,"PCBJacobiSetTotalBlocks_C",(PC,PetscInt,const PetscInt[]),(pc,blocks,lens));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
   PCBJacobiGetTotalBlocks - Gets the global number of blocks for the block
   Jacobi preconditioner.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  blocks - the number of blocks
-  lens - integer array containing the size of each block

   Level: intermediate

.seealso: PCSetUseAmat(), PCBJacobiGetLocalBlocks()
@*/
PetscErrorCode  PCBJacobiGetTotalBlocks(PC pc, PetscInt *blocks, const PetscInt *lens[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID,1);
  PetscValidIntPointer(blocks,2);
  ierr = PetscUseMethod(pc,"PCBJacobiGetTotalBlocks_C",(PC,PetscInt*, const PetscInt *[]),(pc,blocks,lens));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   PCBJacobiSetLocalBlocks - Sets the local number of blocks for the block
   Jacobi preconditioner.

   Not Collective

   Input Parameters:
+  pc - the preconditioner context
.  blocks - the number of blocks
-  lens - [optional] integer array containing size of each block

   Options Database Key:
.  -pc_bjacobi_local_blocks <blocks> - Sets the number of local blocks

   Note:
   Currently only a limited number of blocking configurations are supported.

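   Example:
   A minimal sketch (assuming this process owns 50 matrix rows) splitting them into
   two blocks of sizes 30 and 20:
.vb
   PetscInt lens[2] = {30,20};
   ierr = PCBJacobiSetLocalBlocks(pc,2,lens);CHKERRQ(ierr);
.ve
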
   Level: intermediate

.seealso: PCSetUseAmat(), PCBJacobiSetTotalBlocks()
@*/
PetscErrorCode  PCBJacobiSetLocalBlocks(PC pc,PetscInt blocks,const PetscInt lens[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (blocks < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must have nonnegative blocks");
  ierr = PetscTryMethod(pc,"PCBJacobiSetLocalBlocks_C",(PC,PetscInt,const PetscInt []),(pc,blocks,lens));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
   PCBJacobiGetLocalBlocks - Gets the local number of blocks for the block
   Jacobi preconditioner.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  blocks - the number of blocks
-  lens - integer array containing the size of each block

   Note:
   Currently only a limited number of blocking configurations are supported.

   Level: intermediate

.seealso: PCSetUseAmat(), PCBJacobiGetTotalBlocks()
@*/
PetscErrorCode  PCBJacobiGetLocalBlocks(PC pc, PetscInt *blocks, const PetscInt *lens[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID,1);
  PetscValidIntPointer(blocks,2);
  ierr = PetscUseMethod(pc,"PCBJacobiGetLocalBlocks_C",(PC,PetscInt*, const PetscInt *[]),(pc,blocks,lens));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* -----------------------------------------------------------------------------------*/

/*MC
   PCBJACOBI - Use block Jacobi preconditioning, each block is (approximately) solved with
           its own KSP object.

   Options Database Keys:
+  -pc_use_amat - use Amat to apply block of operator in inner Krylov method
-  -pc_bjacobi_blocks <n> - use n total blocks

   Notes:
    Each processor can have one or more blocks, or a single block can be shared by several processes. Defaults to one block per processor.

     To set options on the solvers for each block append -sub_ to all the KSP and PC
        options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly

     To set the options on the solvers separately for each block call PCBJacobiGetSubKSP()
         and set the options directly on the resulting KSP object (you can access its PC
         with KSPGetPC())

     For GPU-based vectors (CUDA, ViennaCL) it is recommended to use exactly one block per MPI process for best
         performance.  Different block partitioning may lead to additional data transfers
         between host and GPU that lead to degraded performance.

     The options prefix for each block is sub_, for example -sub_pc_type lu.

     When multiple processes share a single block, each block encompasses exactly all the unknowns owned by its set of processes.

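   Example:
   A typical command line (the executable name and option values are illustrative)
   running four blocks, each solved with ILU-preconditioned GMRES:
.vb
   mpiexec -n 4 ./app -ksp_type cg -pc_type bjacobi -pc_bjacobi_blocks 4 -sub_ksp_type gmres -sub_pc_type ilu
.ve
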
   Level: beginner

.seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
           PCASM, PCSetUseAmat(), PCGetUseAmat(), PCBJacobiGetSubKSP(), PCBJacobiSetTotalBlocks(),
           PCBJacobiSetLocalBlocks(), PCSetModifySubMatrices()
M*/

PETSC_EXTERN PetscErrorCode PCCreate_BJacobi(PC pc)
{
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  PC_BJacobi     *jac;

  PetscFunctionBegin;
  ierr = PetscNewLog(pc,&jac);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRMPI(ierr);

  pc->ops->apply           = NULL;
  pc->ops->matapply        = NULL;
  pc->ops->applytranspose  = NULL;
  pc->ops->setup           = PCSetUp_BJacobi;
  pc->ops->destroy         = PCDestroy_BJacobi;
  pc->ops->setfromoptions  = PCSetFromOptions_BJacobi;
  pc->ops->view            = PCView_BJacobi;
  pc->ops->applyrichardson = NULL;

  pc->data               = (void*)jac;
  jac->n                 = -1;
  jac->n_local           = -1;
  jac->first_local       = rank;
  jac->ksp               = NULL;
  jac->same_local_solves = PETSC_TRUE;
  jac->g_lens            = NULL;
  jac->l_lens            = NULL;
  jac->psubcomm          = NULL;

  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiGetSubKSP_C",PCBJacobiGetSubKSP_BJacobi);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiSetTotalBlocks_C",PCBJacobiSetTotalBlocks_BJacobi);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiGetTotalBlocks_C",PCBJacobiGetTotalBlocks_BJacobi);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiSetLocalBlocks_C",PCBJacobiSetLocalBlocks_BJacobi);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiGetLocalBlocks_C",PCBJacobiGetLocalBlocks_BJacobi);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* --------------------------------------------------------------------------------------------*/
/*
        These are for a single block per processor; works for AIJ and BAIJ matrices, both Seq and MPI
*/
static PetscErrorCode PCReset_BJacobi_Singleblock(PC pc)
{
  PC_BJacobi             *jac  = (PC_BJacobi*)pc->data;
  PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  ierr = KSPReset(jac->ksp[0]);CHKERRQ(ierr);
  ierr = VecDestroy(&bjac->x);CHKERRQ(ierr);
  ierr = VecDestroy(&bjac->y);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCDestroy_BJacobi_Singleblock(PC pc)
{
  PC_BJacobi             *jac  = (PC_BJacobi*)pc->data;
  PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  ierr = PCReset_BJacobi_Singleblock(pc);CHKERRQ(ierr);
  ierr = KSPDestroy(&jac->ksp[0]);CHKERRQ(ierr);
  ierr = PetscFree(jac->ksp);CHKERRQ(ierr);
  ierr = PetscFree(jac->l_lens);CHKERRQ(ierr);
  ierr = PetscFree(jac->g_lens);CHKERRQ(ierr);
  ierr = PetscFree(bjac);CHKERRQ(ierr);
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCSetUpOnBlocks_BJacobi_Singleblock(PC pc)
{
  PetscErrorCode     ierr;
  PC_BJacobi         *jac = (PC_BJacobi*)pc->data;
  KSP                subksp = jac->ksp[0];
  KSPConvergedReason reason;

  PetscFunctionBegin;
  ierr = KSPSetUp(subksp);CHKERRQ(ierr);
  ierr = KSPGetConvergedReason(subksp,&reason);CHKERRQ(ierr);
  if (reason == KSP_DIVERGED_PC_FAILED) {
    pc->failedreason = PC_SUBPC_ERROR;
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode PCApply_BJacobi_Singleblock(PC pc,Vec x,Vec y)
{
  PetscErrorCode         ierr;
  PC_BJacobi             *jac  = (PC_BJacobi*)pc->data;
  PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;

  PetscFunctionBegin;
  ierr = VecGetLocalVectorRead(x, bjac->x);CHKERRQ(ierr);
  ierr = VecGetLocalVector(y, bjac->y);CHKERRQ(ierr);
  /* Since the inner KSP matrix may point directly to the diagonal block of an MPI matrix, the inner
     matrix may change even if the outer KSP/PC has not updated the preconditioner. This would trigger a rebuild
     of the inner preconditioner automatically unless we pass down the outer preconditioner's reuse flag. */
  ierr = KSPSetReusePreconditioner(jac->ksp[0],pc->reusepreconditioner);CHKERRQ(ierr);
  ierr = KSPSolve(jac->ksp[0],bjac->x,bjac->y);CHKERRQ(ierr);
  ierr = KSPCheckSolve(jac->ksp[0],pc,bjac->y);CHKERRQ(ierr);
  ierr = VecRestoreLocalVectorRead(x, bjac->x);CHKERRQ(ierr);
  ierr = VecRestoreLocalVector(y, bjac->y);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCMatApply_BJacobi_Singleblock(PC pc,Mat X,Mat Y)
{
  PC_BJacobi     *jac  = (PC_BJacobi*)pc->data;
  Mat            sX,sY;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Since the inner KSP matrix may point directly to the diagonal block of an MPI matrix, the inner
     matrix may change even if the outer KSP/PC has not updated the preconditioner. This would trigger a rebuild
     of the inner preconditioner automatically unless we pass down the outer preconditioner's reuse flag. */
  ierr = KSPSetReusePreconditioner(jac->ksp[0],pc->reusepreconditioner);CHKERRQ(ierr);
  ierr = MatDenseGetLocalMatrix(X,&sX);CHKERRQ(ierr);
  ierr = MatDenseGetLocalMatrix(Y,&sY);CHKERRQ(ierr);
  ierr = KSPMatSolve(jac->ksp[0],sX,sY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCApplySymmetricLeft_BJacobi_Singleblock(PC pc,Vec x,Vec y)
{
  PetscErrorCode         ierr;
  PC_BJacobi             *jac  = (PC_BJacobi*)pc->data;
  PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
  PetscScalar            *y_array;
  const PetscScalar      *x_array;
  PC                     subpc;

  PetscFunctionBegin;
  /*
      The VecPlaceArray() calls avoid copying the x and y vectors
    into the bjac->x and bjac->y work vectors. The reason for
    these work vectors is that we need sequential vectors
    for the sequential solve.
  */
  ierr = VecGetArrayRead(x,&x_array);CHKERRQ(ierr);
  ierr = VecGetArray(y,&y_array);CHKERRQ(ierr);
  ierr = VecPlaceArray(bjac->x,x_array);CHKERRQ(ierr);
  ierr = VecPlaceArray(bjac->y,y_array);CHKERRQ(ierr);
  /* apply the symmetric left portion of the inner PC operator */
  /* note this bypasses the inner KSP and its options completely */
  ierr = KSPGetPC(jac->ksp[0],&subpc);CHKERRQ(ierr);
  ierr = PCApplySymmetricLeft(subpc,bjac->x,bjac->y);CHKERRQ(ierr);
  ierr = VecResetArray(bjac->x);CHKERRQ(ierr);
  ierr = VecResetArray(bjac->y);CHKERRQ(ierr);
  ierr = VecRestoreArrayRead(x,&x_array);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&y_array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCApplySymmetricRight_BJacobi_Singleblock(PC pc,Vec x,Vec y)
{
  PetscErrorCode         ierr;
  PC_BJacobi             *jac  = (PC_BJacobi*)pc->data;
  PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
  PetscScalar            *y_array;
  const PetscScalar      *x_array;
  PC                     subpc;

  PetscFunctionBegin;
  /*
      The VecPlaceArray() calls avoid copying the x and y vectors
    into the bjac->x and bjac->y work vectors. The reason for
    these work vectors is that we need sequential vectors
    for the sequential solve.
  */
  ierr = VecGetArrayRead(x,&x_array);CHKERRQ(ierr);
  ierr = VecGetArray(y,&y_array);CHKERRQ(ierr);
  ierr = VecPlaceArray(bjac->x,x_array);CHKERRQ(ierr);
  ierr = VecPlaceArray(bjac->y,y_array);CHKERRQ(ierr);

  /* apply the symmetric right portion of the inner PC operator */
  /* note this bypasses the inner KSP and its options completely */

  ierr = KSPGetPC(jac->ksp[0],&subpc);CHKERRQ(ierr);
  ierr = PCApplySymmetricRight(subpc,bjac->x,bjac->y);CHKERRQ(ierr);

  /* the placed arrays must be reset before the outer arrays are restored */
  ierr = VecResetArray(bjac->x);CHKERRQ(ierr);
  ierr = VecResetArray(bjac->y);CHKERRQ(ierr);
  ierr = VecRestoreArrayRead(x,&x_array);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&y_array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCApplyTranspose_BJacobi_Singleblock(PC pc,Vec x,Vec y)
{
  PetscErrorCode         ierr;
  PC_BJacobi             *jac  = (PC_BJacobi*)pc->data;
  PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
  PetscScalar            *y_array;
  const PetscScalar      *x_array;

  PetscFunctionBegin;
  /*
      The VecPlaceArray() calls avoid copying the x and y vectors
    into the bjac->x and bjac->y work vectors. The reason for
    these work vectors is that we need sequential vectors
    for the sequential solve.
  */
  ierr = VecGetArrayRead(x,&x_array);CHKERRQ(ierr);
  ierr = VecGetArray(y,&y_array);CHKERRQ(ierr);
  ierr = VecPlaceArray(bjac->x,x_array);CHKERRQ(ierr);
  ierr = VecPlaceArray(bjac->y,y_array);CHKERRQ(ierr);
  ierr = KSPSolveTranspose(jac->ksp[0],bjac->x,bjac->y);CHKERRQ(ierr);
  ierr = KSPCheckSolve(jac->ksp[0],pc,bjac->y);CHKERRQ(ierr);
  ierr = VecResetArray(bjac->x);CHKERRQ(ierr);
  ierr = VecResetArray(bjac->y);CHKERRQ(ierr);
  ierr = VecRestoreArrayRead(x,&x_array);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&y_array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCSetUp_BJacobi_Singleblock(PC pc,Mat mat,Mat pmat)
{
  PC_BJacobi             *jac = (PC_BJacobi*)pc->data;
  PetscErrorCode         ierr;
  PetscInt               m;
  KSP                    ksp;
  PC_BJacobi_Singleblock *bjac;
  PetscBool              wasSetup = PETSC_TRUE;
  VecType                vectype;
  const char             *prefix;

  PetscFunctionBegin;
  if (!pc->setupcalled) {
    if (!jac->ksp) {
      wasSetup = PETSC_FALSE;

      ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
      ierr = KSPSetErrorIfNotConverged(ksp,pc->erroriffailure);CHKERRQ(ierr);
      ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
      ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)ksp);CHKERRQ(ierr);
      ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
      ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
      ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
      ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);

      pc->ops->reset               = PCReset_BJacobi_Singleblock;
      pc->ops->destroy             = PCDestroy_BJacobi_Singleblock;
      pc->ops->apply               = PCApply_BJacobi_Singleblock;
      pc->ops->matapply            = PCMatApply_BJacobi_Singleblock;
      pc->ops->applysymmetricleft  = PCApplySymmetricLeft_BJacobi_Singleblock;
      pc->ops->applysymmetricright = PCApplySymmetricRight_BJacobi_Singleblock;
      pc->ops->applytranspose      = PCApplyTranspose_BJacobi_Singleblock;
      pc->ops->setuponblocks       = PCSetUpOnBlocks_BJacobi_Singleblock;

      ierr        = PetscMalloc1(1,&jac->ksp);CHKERRQ(ierr);
      jac->ksp[0] = ksp;

      ierr      = PetscNewLog(pc,&bjac);CHKERRQ(ierr);
      jac->data = (void*)bjac;
    } else {
      ksp  = jac->ksp[0];
      bjac = (PC_BJacobi_Singleblock*)jac->data;
    }

    /*
      The reason we need to generate these vectors is to serve
      as the right-hand side and solution vector for the solve on the
      block. We do not need to allocate space for the vectors since
      that is provided via VecPlaceArray() just before the call to
      KSPSolve() on the block.
    */
    ierr = MatGetSize(pmat,&m,&m);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,m,NULL,&bjac->x);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,m,NULL,&bjac->y);CHKERRQ(ierr);
    ierr = MatGetVecType(pmat,&vectype);CHKERRQ(ierr);
    ierr = VecSetType(bjac->x,vectype);CHKERRQ(ierr);
    ierr = VecSetType(bjac->y,vectype);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)bjac->x);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)bjac->y);CHKERRQ(ierr);
  } else {
    ksp  = jac->ksp[0];
    bjac = (PC_BJacobi_Singleblock*)jac->data;
  }
  ierr = KSPGetOptionsPrefix(ksp,&prefix);CHKERRQ(ierr);
  if (pc->useAmat) {
    ierr = KSPSetOperators(ksp,mat,pmat);CHKERRQ(ierr);
    ierr = MatSetOptionsPrefix(mat,prefix);CHKERRQ(ierr);
  } else {
    ierr = KSPSetOperators(ksp,pmat,pmat);CHKERRQ(ierr);
  }
  ierr = MatSetOptionsPrefix(pmat,prefix);CHKERRQ(ierr);
  if (!wasSetup && pc->setfromoptionscalled) {
    /* If PCSetFromOptions_BJacobi is called later, KSPSetFromOptions will be called at that time. */
    ierr = KSPSetFromOptions(ksp);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/* ---------------------------------------------------------------------------------------------*/
static PetscErrorCode PCReset_BJacobi_Multiblock(PC pc)
{
  PC_BJacobi            *jac  = (PC_BJacobi*)pc->data;
  PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
  PetscErrorCode        ierr;
  PetscInt              i;

  PetscFunctionBegin;
  if (bjac && bjac->pmat) {
    ierr = MatDestroyMatrices(jac->n_local,&bjac->pmat);CHKERRQ(ierr);
    if (pc->useAmat) {
      ierr = MatDestroyMatrices(jac->n_local,&bjac->mat);CHKERRQ(ierr);
    }
  }

  for (i=0; i<jac->n_local; i++) {
    ierr = KSPReset(jac->ksp[i]);CHKERRQ(ierr);
    if (bjac && bjac->x) {
      ierr = VecDestroy(&bjac->x[i]);CHKERRQ(ierr);
      ierr = VecDestroy(&bjac->y[i]);CHKERRQ(ierr);
      ierr = ISDestroy(&bjac->is[i]);CHKERRQ(ierr);
    }
  }
  ierr = PetscFree(jac->l_lens);CHKERRQ(ierr);
  ierr = PetscFree(jac->g_lens);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCDestroy_BJacobi_Multiblock(PC pc)
{
  PC_BJacobi            *jac  = (PC_BJacobi*)pc->data;
  PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
  PetscErrorCode        ierr;
  PetscInt              i;

  PetscFunctionBegin;
  ierr = PCReset_BJacobi_Multiblock(pc);CHKERRQ(ierr);
  if (bjac) {
    ierr = PetscFree2(bjac->x,bjac->y);CHKERRQ(ierr);
    ierr = PetscFree(bjac->starts);CHKERRQ(ierr);
    ierr = PetscFree(bjac->is);CHKERRQ(ierr);
  }
  ierr = PetscFree(jac->data);CHKERRQ(ierr);
  for (i=0; i<jac->n_local; i++) {
    ierr = KSPDestroy(&jac->ksp[i]);CHKERRQ(ierr);
  }
  ierr = PetscFree(jac->ksp);CHKERRQ(ierr);
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCSetUpOnBlocks_BJacobi_Multiblock(PC pc)
{
  PC_BJacobi         *jac = (PC_BJacobi*)pc->data;
  PetscErrorCode     ierr;
  PetscInt           i,n_local = jac->n_local;
  KSPConvergedReason reason;

  PetscFunctionBegin;
  for (i=0; i<n_local; i++) {
    ierr = KSPSetUp(jac->ksp[i]);CHKERRQ(ierr);
    ierr = KSPGetConvergedReason(jac->ksp[i],&reason);CHKERRQ(ierr);
    if (reason == KSP_DIVERGED_PC_FAILED) {
      pc->failedreason = PC_SUBPC_ERROR;
    }
  }
  PetscFunctionReturn(0);
}

/*
      Preconditioner for block Jacobi
*/
static PetscErrorCode PCApply_BJacobi_Multiblock(PC pc,Vec x,Vec y)
{
  PC_BJacobi            *jac = (PC_BJacobi*)pc->data;
  PetscErrorCode        ierr;
  PetscInt              i,n_local = jac->n_local;
  PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
  PetscScalar           *yin;
  const PetscScalar     *xin;

  PetscFunctionBegin;
  ierr = VecGetArrayRead(x,&xin);CHKERRQ(ierr);
  ierr = VecGetArray(y,&yin);CHKERRQ(ierr);
  for (i=0; i<n_local; i++) {
    /*
       To avoid copying the subvector from x into a workspace we instead
       make the workspace vector array point to the subpart of the array of
       the global vector.
    */
    ierr = VecPlaceArray(bjac->x[i],xin+bjac->starts[i]);CHKERRQ(ierr);
    ierr = VecPlaceArray(bjac->y[i],yin+bjac->starts[i]);CHKERRQ(ierr);

    ierr = PetscLogEventBegin(PC_ApplyOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr);
    ierr = KSPSolve(jac->ksp[i],bjac->x[i],bjac->y[i]);CHKERRQ(ierr);
    ierr = KSPCheckSolve(jac->ksp[i],pc,bjac->y[i]);CHKERRQ(ierr);
    ierr = PetscLogEventEnd(PC_ApplyOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr);

    ierr = VecResetArray(bjac->x[i]);CHKERRQ(ierr);
    ierr = VecResetArray(bjac->y[i]);CHKERRQ(ierr);
  }
  ierr = VecRestoreArrayRead(x,&xin);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&yin);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*
      Transpose preconditioner application for block Jacobi
*/
static PetscErrorCode PCApplyTranspose_BJacobi_Multiblock(PC pc,Vec x,Vec y)
{
  PC_BJacobi            *jac = (PC_BJacobi*)pc->data;
  PetscErrorCode        ierr;
  PetscInt              i,n_local = jac->n_local;
  PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
  PetscScalar           *yin;
  const PetscScalar     *xin;

  PetscFunctionBegin;
  ierr = VecGetArrayRead(x,&xin);CHKERRQ(ierr);
  ierr = VecGetArray(y,&yin);CHKERRQ(ierr);
  for (i=0; i<n_local; i++) {
    /*
       To avoid copying the subvector from x into a workspace we instead
       make the workspace vector array point to the subpart of the array of
       the global vector.
    */
    ierr = VecPlaceArray(bjac->x[i],xin+bjac->starts[i]);CHKERRQ(ierr);
    ierr = VecPlaceArray(bjac->y[i],yin+bjac->starts[i]);CHKERRQ(ierr);

    ierr = PetscLogEventBegin(PC_ApplyTransposeOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr);
    ierr = KSPSolveTranspose(jac->ksp[i],bjac->x[i],bjac->y[i]);CHKERRQ(ierr);
    ierr = KSPCheckSolve(jac->ksp[i],pc,bjac->y[i]);CHKERRQ(ierr);
    ierr = PetscLogEventEnd(PC_ApplyTransposeOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr);

    ierr = VecResetArray(bjac->x[i]);CHKERRQ(ierr);
    ierr = VecResetArray(bjac->y[i]);CHKERRQ(ierr);
  }
  ierr = VecRestoreArrayRead(x,&xin);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&yin);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCSetUp_BJacobi_Multiblock(PC pc,Mat mat,Mat pmat)
{
  PC_BJacobi            *jac = (PC_BJacobi*)pc->data;
  PetscErrorCode        ierr;
  PetscInt              m,n_local,N,M,start,i;
  const char            *prefix;
  KSP                   ksp;
  Vec                   x,y;
  PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
  PC                    subpc;
  IS                    is;
  MatReuse              scall;
  VecType               vectype;

  PetscFunctionBegin;
  ierr = MatGetLocalSize(pc->pmat,&M,&N);CHKERRQ(ierr);

  n_local = jac->n_local;

  if (pc->useAmat) {
    PetscBool same;
    ierr = PetscObjectTypeCompare((PetscObject)mat,((PetscObject)pmat)->type_name,&same);CHKERRQ(ierr);
    if (!same) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_INCOMP,"Matrices not of same type");
  }

  if (!pc->setupcalled) {
    scall = MAT_INITIAL_MATRIX;

    if (!jac->ksp) {
      pc->ops->reset         = PCReset_BJacobi_Multiblock;
      pc->ops->destroy       = PCDestroy_BJacobi_Multiblock;
      pc->ops->apply         = PCApply_BJacobi_Multiblock;
      pc->ops->matapply      = NULL;
      pc->ops->applytranspose= PCApplyTranspose_BJacobi_Multiblock;
      pc->ops->setuponblocks = PCSetUpOnBlocks_BJacobi_Multiblock;

      ierr = PetscNewLog(pc,&bjac);CHKERRQ(ierr);
      ierr = PetscMalloc1(n_local,&jac->ksp);CHKERRQ(ierr);
      ierr = PetscLogObjectMemory((PetscObject)pc,n_local*sizeof(KSP));CHKERRQ(ierr);
      ierr = PetscMalloc2(n_local,&bjac->x,n_local,&bjac->y);CHKERRQ(ierr);
      ierr = PetscMalloc1(n_local,&bjac->starts);CHKERRQ(ierr);
      ierr = PetscLogObjectMemory((PetscObject)pc,n_local*sizeof(PetscInt));CHKERRQ(ierr);

      jac->data = (void*)bjac;
      ierr      = PetscMalloc1(n_local,&bjac->is);CHKERRQ(ierr);
      ierr      = PetscLogObjectMemory((PetscObject)pc,n_local*sizeof(IS));CHKERRQ(ierr);

      for (i=0; i<n_local; i++) {
        ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
        ierr = KSPSetErrorIfNotConverged(ksp,pc->erroriffailure);CHKERRQ(ierr);
        ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
        ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)ksp);CHKERRQ(ierr);
        ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
        ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
        ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
        ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
        ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);

        jac->ksp[i] = ksp;
      }
    } else {
      bjac = (PC_BJacobi_Multiblock*)jac->data;
    }

    start = 0;
    ierr = MatGetVecType(pmat,&vectype);CHKERRQ(ierr);
    for (i=0; i<n_local; i++) {
      m = jac->l_lens[i];
      /*
      The reason we need to generate these vectors is to serve
      as the right-hand side and solution vector for the solve on the
      block. We do not need to allocate space for the vectors since
      that is provided via VecPlaceArray() just before the call to
      KSPSolve() on the block.
      */
      ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,m,NULL,&x);CHKERRQ(ierr);
      ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,m,NULL,&y);CHKERRQ(ierr);
      ierr = VecSetType(x,vectype);CHKERRQ(ierr);
      ierr = VecSetType(y,vectype);CHKERRQ(ierr);
      ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)x);CHKERRQ(ierr);
      ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)y);CHKERRQ(ierr);

      bjac->x[i]      = x;
      bjac->y[i]      = y;
      bjac->starts[i] = start;

      ierr        = ISCreateStride(PETSC_COMM_SELF,m,start,1,&is);CHKERRQ(ierr);
      bjac->is[i] = is;
      ierr        = PetscLogObjectParent((PetscObject)pc,(PetscObject)is);CHKERRQ(ierr);

      start += m;
    }
  } else {
    bjac = (PC_BJacobi_Multiblock*)jac->data;
    /*
       Destroy the blocks from the previous iteration
    */
    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      ierr = MatDestroyMatrices(n_local,&bjac->pmat);CHKERRQ(ierr);
      if (pc->useAmat) {
        ierr = MatDestroyMatrices(n_local,&bjac->mat);CHKERRQ(ierr);
      }
      scall = MAT_INITIAL_MATRIX;
    } else scall = MAT_REUSE_MATRIX;
  }

  ierr = MatCreateSubMatrices(pmat,n_local,bjac->is,bjac->is,scall,&bjac->pmat);CHKERRQ(ierr);
  if (pc->useAmat) {
    ierr = MatCreateSubMatrices(mat,n_local,bjac->is,bjac->is,scall,&bjac->mat);CHKERRQ(ierr);
  }
  /* Return control to the user so that the submatrices can be modified (e.g., to apply
     different boundary conditions for the submatrices than for the global problem) */
  ierr = PCModifySubMatrices(pc,n_local,bjac->is,bjac->is,bjac->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);

  for (i=0; i<n_local; i++) {
    ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)bjac->pmat[i]);CHKERRQ(ierr);
    ierr = KSPGetOptionsPrefix(jac->ksp[i],&prefix);CHKERRQ(ierr);
    if (pc->useAmat) {
      ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)bjac->mat[i]);CHKERRQ(ierr);
      ierr = KSPSetOperators(jac->ksp[i],bjac->mat[i],bjac->pmat[i]);CHKERRQ(ierr);
      ierr = MatSetOptionsPrefix(bjac->mat[i],prefix);CHKERRQ(ierr);
    } else {
      ierr = KSPSetOperators(jac->ksp[i],bjac->pmat[i],bjac->pmat[i]);CHKERRQ(ierr);
    }
    ierr = MatSetOptionsPrefix(bjac->pmat[i],prefix);CHKERRQ(ierr);
    if (pc->setfromoptionscalled) {
      ierr = KSPSetFromOptions(jac->ksp[i]);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}

/* ---------------------------------------------------------------------------------------------*/
/*
      These are for a single block with multiple processes
*/
static PetscErrorCode PCReset_BJacobi_Multiproc(PC pc)
{
  PC_BJacobi           *jac   = (PC_BJacobi*)pc->data;
  PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
  PetscErrorCode       ierr;

  PetscFunctionBegin;
  ierr = VecDestroy(&mpjac->ysub);CHKERRQ(ierr);
  ierr = VecDestroy(&mpjac->xsub);CHKERRQ(ierr);
  ierr = MatDestroy(&mpjac->submats);CHKERRQ(ierr);
  if (jac->ksp) {ierr = KSPReset(jac->ksp[0]);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}

static PetscErrorCode PCDestroy_BJacobi_Multiproc(PC pc)
{
  PC_BJacobi           *jac   = (PC_BJacobi*)pc->data;
  PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
  PetscErrorCode       ierr;

  PetscFunctionBegin;
  ierr = PCReset_BJacobi_Multiproc(pc);CHKERRQ(ierr);
  ierr = KSPDestroy(&jac->ksp[0]);CHKERRQ(ierr);
  ierr = PetscFree(jac->ksp);CHKERRQ(ierr);
  ierr = PetscSubcommDestroy(&mpjac->psubcomm);CHKERRQ(ierr);

  ierr = PetscFree(mpjac);CHKERRQ(ierr);
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCApply_BJacobi_Multiproc(PC pc,Vec x,Vec y)
{
  PC_BJacobi           *jac   = (PC_BJacobi*)pc->data;
  PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
  PetscErrorCode       ierr;
  PetscScalar          *yarray;
  const PetscScalar    *xarray;
  KSPConvergedReason   reason;

  PetscFunctionBegin;
  /* place x's and y's local arrays into xsub and ysub */
  ierr = VecGetArrayRead(x,&xarray);CHKERRQ(ierr);
  ierr = VecGetArray(y,&yarray);CHKERRQ(ierr);
  ierr = VecPlaceArray(mpjac->xsub,xarray);CHKERRQ(ierr);
  ierr = VecPlaceArray(mpjac->ysub,yarray);CHKERRQ(ierr);

  /* apply preconditioner on each matrix block */
  ierr = PetscLogEventBegin(PC_ApplyOnBlocks,jac->ksp[0],mpjac->xsub,mpjac->ysub,0);CHKERRQ(ierr);
  ierr = KSPSolve(jac->ksp[0],mpjac->xsub,mpjac->ysub);CHKERRQ(ierr);
  ierr = KSPCheckSolve(jac->ksp[0],pc,mpjac->ysub);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(PC_ApplyOnBlocks,jac->ksp[0],mpjac->xsub,mpjac->ysub,0);CHKERRQ(ierr);
  ierr = KSPGetConvergedReason(jac->ksp[0],&reason);CHKERRQ(ierr);
  if (reason == KSP_DIVERGED_PC_FAILED) {
    pc->failedreason = PC_SUBPC_ERROR;
  }

  ierr = VecResetArray(mpjac->xsub);CHKERRQ(ierr);
  ierr = VecResetArray(mpjac->ysub);CHKERRQ(ierr);
  ierr = VecRestoreArrayRead(x,&xarray);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&yarray);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCMatApply_BJacobi_Multiproc(PC pc,Mat X,Mat Y)
{
  PC_BJacobi           *jac   = (PC_BJacobi*)pc->data;
  KSPConvergedReason   reason;
  Mat                  sX,sY;
  const PetscScalar    *x;
  PetscScalar          *y;
  PetscInt             m,N,lda,ldb;
  PetscErrorCode       ierr;

  PetscFunctionBegin;
  /* apply preconditioner on each matrix block */
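  /* The local arrays of X and Y are wrapped in dense matrices on the sub-KSP's communicator
     so that KSPMatSolve() can treat all right-hand sides at once without copying data. */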
  ierr = MatGetLocalSize(X,&m,NULL);CHKERRQ(ierr);
  ierr = MatGetSize(X,NULL,&N);CHKERRQ(ierr);
  ierr = MatDenseGetLDA(X,&lda);CHKERRQ(ierr);
  ierr = MatDenseGetLDA(Y,&ldb);CHKERRQ(ierr);
  ierr = MatDenseGetArrayRead(X,&x);CHKERRQ(ierr);
  ierr = MatDenseGetArrayWrite(Y,&y);CHKERRQ(ierr);
  ierr = MatCreateDense(PetscObjectComm((PetscObject)jac->ksp[0]),m,PETSC_DECIDE,PETSC_DECIDE,N,(PetscScalar*)x,&sX);CHKERRQ(ierr);
  ierr = MatCreateDense(PetscObjectComm((PetscObject)jac->ksp[0]),m,PETSC_DECIDE,PETSC_DECIDE,N,y,&sY);CHKERRQ(ierr);
  ierr = MatDenseSetLDA(sX,lda);CHKERRQ(ierr);
  ierr = MatDenseSetLDA(sY,ldb);CHKERRQ(ierr);
  ierr = PetscLogEventBegin(PC_ApplyOnBlocks,jac->ksp[0],X,Y,0);CHKERRQ(ierr);
  ierr = KSPMatSolve(jac->ksp[0],sX,sY);CHKERRQ(ierr);
  ierr = KSPCheckSolve(jac->ksp[0],pc,NULL);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(PC_ApplyOnBlocks,jac->ksp[0],X,Y,0);CHKERRQ(ierr);
  ierr = MatDestroy(&sY);CHKERRQ(ierr);
  ierr = MatDestroy(&sX);CHKERRQ(ierr);
  ierr = MatDenseRestoreArrayWrite(Y,&y);CHKERRQ(ierr);
  ierr = MatDenseRestoreArrayRead(X,&x);CHKERRQ(ierr);
  ierr = KSPGetConvergedReason(jac->ksp[0],&reason);CHKERRQ(ierr);
  if (reason == KSP_DIVERGED_PC_FAILED) {
    pc->failedreason = PC_SUBPC_ERROR;
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode PCSetUp_BJacobi_Multiproc(PC pc)
{
  PC_BJacobi           *jac   = (PC_BJacobi*)pc->data;
  PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
  PetscErrorCode       ierr;
  PetscInt             m,n;
  MPI_Comm             comm,subcomm=0;
  const char           *prefix;
  PetscBool            wasSetup = PETSC_TRUE;
  VecType              vectype;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
  if (jac->n_local > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Only a single block in a subcommunicator is supported");
  jac->n_local = 1; /* currently only a single block is supported for a subcommunicator */
  if (!pc->setupcalled) {
    wasSetup  = PETSC_FALSE;
    ierr      = PetscNewLog(pc,&mpjac);CHKERRQ(ierr);
    jac->data = (void*)mpjac;

    /* initialize the data structure mpjac */
    if (!jac->psubcomm) {
      /* create default contiguous subcommunicators if the user does not provide them */
      ierr = PetscSubcommCreate(comm,&jac->psubcomm);CHKERRQ(ierr);
      ierr = PetscSubcommSetNumber(jac->psubcomm,jac->n);CHKERRQ(ierr);
      ierr = PetscSubcommSetType(jac->psubcomm,PETSC_SUBCOMM_CONTIGUOUS);CHKERRQ(ierr);
      ierr = PetscLogObjectMemory((PetscObject)pc,sizeof(PetscSubcomm));CHKERRQ(ierr);
    }
    mpjac->psubcomm = jac->psubcomm;
    subcomm         = PetscSubcommChild(mpjac->psubcomm);

    /* Get matrix blocks of pmat */
    ierr = MatGetMultiProcBlock(pc->pmat,subcomm,MAT_INITIAL_MATRIX,&mpjac->submats);CHKERRQ(ierr);

    /* create a new KSP that the processes in each subcomm have a copy of */
    ierr = PetscMalloc1(1,&jac->ksp);CHKERRQ(ierr);
    ierr = KSPCreate(subcomm,&jac->ksp[0]);CHKERRQ(ierr);
    ierr = KSPSetErrorIfNotConverged(jac->ksp[0],pc->erroriffailure);CHKERRQ(ierr);
    ierr = PetscObjectIncrementTabLevel((PetscObject)jac->ksp[0],(PetscObject)pc,1);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)jac->ksp[0]);CHKERRQ(ierr);
    ierr = KSPSetOperators(jac->ksp[0],mpjac->submats,mpjac->submats);CHKERRQ(ierr);
    ierr = KSPGetPC(jac->ksp[0],&mpjac->pc);CHKERRQ(ierr);

    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    ierr = KSPSetOptionsPrefix(jac->ksp[0],prefix);CHKERRQ(ierr);
    ierr = KSPAppendOptionsPrefix(jac->ksp[0],"sub_");CHKERRQ(ierr);
    ierr = KSPGetOptionsPrefix(jac->ksp[0],&prefix);CHKERRQ(ierr);
    ierr = MatSetOptionsPrefix(mpjac->submats,prefix);CHKERRQ(ierr);

    /* create dummy vectors xsub and ysub */
    ierr = MatGetLocalSize(mpjac->submats,&m,&n);CHKERRQ(ierr);
    ierr = VecCreateMPIWithArray(subcomm,1,n,PETSC_DECIDE,NULL,&mpjac->xsub);CHKERRQ(ierr);
    ierr = VecCreateMPIWithArray(subcomm,1,m,PETSC_DECIDE,NULL,&mpjac->ysub);CHKERRQ(ierr);
    ierr = MatGetVecType(mpjac->submats,&vectype);CHKERRQ(ierr);
    ierr = VecSetType(mpjac->xsub,vectype);CHKERRQ(ierr);
    ierr = VecSetType(mpjac->ysub,vectype);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)mpjac->xsub);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)mpjac->ysub);CHKERRQ(ierr);

    pc->ops->reset   = PCReset_BJacobi_Multiproc;
    pc->ops->destroy = PCDestroy_BJacobi_Multiproc;
    pc->ops->apply   = PCApply_BJacobi_Multiproc;
    pc->ops->matapply= PCMatApply_BJacobi_Multiproc;
  } else { /* pc->setupcalled */
    subcomm = PetscSubcommChild(mpjac->psubcomm);
    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      /* destroy old matrix blocks, then get new matrix blocks */
      if (mpjac->submats) {ierr = MatDestroy(&mpjac->submats);CHKERRQ(ierr);}
      ierr = MatGetMultiProcBlock(pc->pmat,subcomm,MAT_INITIAL_MATRIX,&mpjac->submats);CHKERRQ(ierr);
    } else {
      ierr = MatGetMultiProcBlock(pc->pmat,subcomm,MAT_REUSE_MATRIX,&mpjac->submats);CHKERRQ(ierr);
    }
    ierr = KSPSetOperators(jac->ksp[0],mpjac->submats,mpjac->submats);CHKERRQ(ierr);
  }

  if (!wasSetup && pc->setfromoptionscalled) {
    ierr = KSPSetFromOptions(jac->ksp[0]);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1295