#define PETSCKSP_DLL

/*
   Defines a block Jacobi preconditioner.
*/
#include "private/pcimpl.h"              /*I "petscpc.h" I*/
#include "../src/ksp/pc/impls/bjacobi/bjacobi.h"

static PetscErrorCode PCSetUp_BJacobi_Singleblock(PC,Mat,Mat);
static PetscErrorCode PCSetUp_BJacobi_Multiblock(PC,Mat,Mat);
static PetscErrorCode PCSetUp_BJacobi_Multiproc(PC);

#undef __FUNCT__
#define __FUNCT__ "PCSetUp_BJacobi"
static PetscErrorCode PCSetUp_BJacobi(PC pc)
{
  PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
  Mat            mat = pc->mat,pmat = pc->pmat;
  PetscErrorCode ierr,(*f)(Mat,PetscBool *,MatReuse,Mat*);
  PetscInt       N,M,start,i,sum,end;
  PetscInt       bs,i_start=-1,i_end=-1;
  PetscMPIInt    rank,size;
  const char     *pprefix,*mprefix;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
  ierr = MatGetLocalSize(pc->pmat,&M,&N);CHKERRQ(ierr);
  ierr = MatGetBlockSize(pc->pmat,&bs);CHKERRQ(ierr);

  if (jac->n > 0 && jac->n < size){
    ierr = PCSetUp_BJacobi_Multiproc(pc);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  /* --------------------------------------------------------------------------
      Determines the number of blocks assigned to each processor
  -----------------------------------------------------------------------------*/

  /* local block count given */
  if (jac->n_local > 0 && jac->n < 0) {
    ierr = MPI_Allreduce(&jac->n_local,&jac->n,1,MPIU_INT,MPI_SUM,((PetscObject)pc)->comm);CHKERRQ(ierr);
    if (jac->l_lens) { /* check that user set these correctly */
      sum = 0;
      for (i=0; i<jac->n_local; i++) {
        if (jac->l_lens[i]/bs*bs !=jac->l_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat blocksize doesn't match block Jacobi layout");
        sum += jac->l_lens[i];
      }
      if (sum != M) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local lens set incorrectly");
    } else {
      ierr = PetscMalloc(jac->n_local*sizeof(PetscInt),&jac->l_lens);CHKERRQ(ierr);
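      /*
         The default layout: each of the first (M/bs) % n_local blocks gets one
         extra matrix block. For example, with M = 10, bs = 1 and n_local = 3
         the formula below gives local lengths {4,3,3}.
      */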
      for (i=0; i<jac->n_local; i++) {
        jac->l_lens[i] = bs*((M/bs)/jac->n_local + (((M/bs) % jac->n_local) > i));
      }
    }
  } else if (jac->n > 0 && jac->n_local < 0) { /* global block count given */
    /* global blocks given: determine which ones are local */
    if (jac->g_lens) {
      /* check that g_lens has valid entries */
      for (i=0; i<jac->n; i++) {
        if (!jac->g_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Zero block not allowed");
        if (jac->g_lens[i]/bs*bs != jac->g_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat blocksize doesn't match block Jacobi layout");
      }
      if (size == 1) {
        jac->n_local = jac->n;
        ierr         = PetscMalloc(jac->n_local*sizeof(PetscInt),&jac->l_lens);CHKERRQ(ierr);
        ierr         = PetscMemcpy(jac->l_lens,jac->g_lens,jac->n_local*sizeof(PetscInt));CHKERRQ(ierr);
        /* check that user set these correctly */
        sum = 0;
        for (i=0; i<jac->n_local; i++) sum += jac->l_lens[i];
        if (sum != M) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Global lens set incorrectly");
      } else {
        ierr = MatGetOwnershipRange(pc->pmat,&start,&end);CHKERRQ(ierr);
        /* loop over blocks determining the first one owned by me */
        sum = 0;
        for (i=0; i<jac->n+1; i++) {
          if (sum == start) { i_start = i; goto start_1;}
          if (i < jac->n) sum += jac->g_lens[i];
        }
        SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Block sizes used in PCBJacobiSetTotalBlocks()\nare not compatible with parallel matrix layout");
 start_1:
        for (i=i_start; i<jac->n+1; i++) {
          if (sum == end) { i_end = i; goto end_1; }
          if (i < jac->n) sum += jac->g_lens[i];
        }
        SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Block sizes used in PCBJacobiSetTotalBlocks()\nare not compatible with parallel matrix layout");
 end_1:
        jac->n_local = i_end - i_start;
        ierr         = PetscMalloc(jac->n_local*sizeof(PetscInt),&jac->l_lens);CHKERRQ(ierr);
        ierr         = PetscMemcpy(jac->l_lens,jac->g_lens+i_start,jac->n_local*sizeof(PetscInt));CHKERRQ(ierr);
      }
    } else { /* no global block lengths given, determine them using the default layout */
      jac->n_local = jac->n/size + ((jac->n % size) > rank);
      ierr         = PetscMalloc(jac->n_local*sizeof(PetscInt),&jac->l_lens);CHKERRQ(ierr);
      for (i=0; i<jac->n_local; i++) {
        jac->l_lens[i] = ((M/bs)/jac->n_local + (((M/bs) % jac->n_local) > i))*bs;
        if (!jac->l_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Too many blocks given");
      }
    }
  } else if (jac->n < 0 && jac->n_local < 0) { /* no blocks given */
    jac->n         = size;
    jac->n_local   = 1;
    ierr           = PetscMalloc(sizeof(PetscInt),&jac->l_lens);CHKERRQ(ierr);
    jac->l_lens[0] = M;
  }
  if (jac->n_local < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Number of blocks is less than number of processors");

  /* -------------------------
      Determines mat and pmat
  ---------------------------*/
  ierr = PetscObjectQueryFunction((PetscObject)pc->mat,"MatGetDiagonalBlock_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (size == 1 && !f) {
    mat  = pc->mat;
    pmat = pc->pmat;
  } else {
    PetscBool  iscopy;
    MatReuse   scall;

    if (jac->use_true_local) {
      /* use block from true matrix, not preconditioner matrix for local MatMult() */
      scall = MAT_INITIAL_MATRIX;
      if (pc->setupcalled) {
        if (pc->flag == SAME_NONZERO_PATTERN) {
          if (jac->tp_mat) {
            scall = MAT_REUSE_MATRIX;
            mat   = jac->tp_mat;
          }
        } else {
          if (jac->tp_mat)  {
            ierr = MatDestroy(jac->tp_mat);CHKERRQ(ierr);
          }
        }
      }
      if (!f) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_SUP,"This matrix does not support getting diagonal block");
      ierr = (*f)(pc->mat,&iscopy,scall,&mat);CHKERRQ(ierr);
      /* make submatrix have same prefix as entire matrix */
      ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->mat,&mprefix);CHKERRQ(ierr);
      ierr = PetscObjectSetOptionsPrefix((PetscObject)mat,mprefix);CHKERRQ(ierr);
      if (iscopy) {
        jac->tp_mat = mat;
      }
    }
    if (pc->pmat != pc->mat || !jac->use_true_local) {
      scall = MAT_INITIAL_MATRIX;
      if (pc->setupcalled) {
        if (pc->flag == SAME_NONZERO_PATTERN) {
          if (jac->tp_pmat) {
            scall = MAT_REUSE_MATRIX;
            pmat  = jac->tp_pmat;
          }
        } else {
          if (jac->tp_pmat)  {
            ierr = MatDestroy(jac->tp_pmat);CHKERRQ(ierr);
          }
        }
      }
      ierr = PetscObjectQueryFunction((PetscObject)pc->pmat,"MatGetDiagonalBlock_C",(void (**)(void))&f);CHKERRQ(ierr);
      if (!f) {
        const char *type;
        ierr = PetscObjectGetType((PetscObject) pc->pmat,&type);CHKERRQ(ierr);
        SETERRQ1(((PetscObject)pc)->comm,PETSC_ERR_SUP,"This matrix type, %s, does not support getting diagonal block", type);
      }
      ierr = (*f)(pc->pmat,&iscopy,scall,&pmat);CHKERRQ(ierr);
      /* make submatrix have same prefix as entire matrix */
      ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
      ierr = PetscObjectSetOptionsPrefix((PetscObject)pmat,pprefix);CHKERRQ(ierr);
      if (iscopy) {
        jac->tp_pmat = pmat;
      }
    } else {
      pmat = mat;
    }
  }

  /* ------------------------------------------
     Setup code depends on the number of blocks
  --------------------------------------------*/
  if (jac->n_local == 1) {
    ierr = PCSetUp_BJacobi_Singleblock(pc,mat,pmat);CHKERRQ(ierr);
  } else {
    ierr = PCSetUp_BJacobi_Multiblock(pc,mat,pmat);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/* Default destroy, used if the PC has never been set up */
#undef __FUNCT__
#define __FUNCT__ "PCDestroy_BJacobi"
static PetscErrorCode PCDestroy_BJacobi(PC pc)
{
  PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscFree(jac->g_lens);CHKERRQ(ierr);
  ierr = PetscFree(jac->l_lens);CHKERRQ(ierr);
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetFromOptions_BJacobi"
static PetscErrorCode PCSetFromOptions_BJacobi(PC pc)
{
  PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
  PetscErrorCode ierr;
  PetscInt       blocks;
  PetscBool      flg;

  PetscFunctionBegin;
  ierr = PetscOptionsHead("Block Jacobi options");CHKERRQ(ierr);
    ierr = PetscOptionsInt("-pc_bjacobi_blocks","Total number of blocks","PCBJacobiSetTotalBlocks",jac->n,&blocks,&flg);CHKERRQ(ierr);
    if (flg) {
      ierr = PCBJacobiSetTotalBlocks(pc,blocks,PETSC_NULL);CHKERRQ(ierr);
    }
    flg  = PETSC_FALSE;
    ierr = PetscOptionsBool("-pc_bjacobi_truelocal","Use the true matrix, not the preconditioner matrix, to define the matrix-vector product in the sub-problems","PCBJacobiSetUseTrueLocal",flg,&flg,PETSC_NULL);CHKERRQ(ierr);
    if (flg) {
      ierr = PCBJacobiSetUseTrueLocal(pc);CHKERRQ(ierr);
    }
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCView_BJacobi"
static PetscErrorCode PCView_BJacobi(PC pc,PetscViewer viewer)
{
  PC_BJacobi           *jac = (PC_BJacobi*)pc->data;
  PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
  PetscErrorCode       ierr;
  PetscMPIInt          rank;
  PetscInt             i;
  PetscBool            iascii,isstring;
  PetscViewer          sviewer;

  PetscFunctionBegin;
  ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    if (jac->use_true_local) {
      ierr = PetscViewerASCIIPrintf(viewer,"  block Jacobi: using true local matrix, number of blocks = %D\n",jac->n);CHKERRQ(ierr);
    }
    ierr = PetscViewerASCIIPrintf(viewer,"  block Jacobi: number of blocks = %D\n",jac->n);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
    if (jac->same_local_solves) {
      ierr = PetscViewerASCIIPrintf(viewer,"  Local solve is same for all blocks, in the following KSP and PC objects:\n");CHKERRQ(ierr);
      if (jac->ksp) {
        ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
        if (!rank){
          ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
          ierr = KSPView(jac->ksp[0],sviewer);CHKERRQ(ierr);
          ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
        }
        ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
      } else if (jac->psubcomm && !jac->psubcomm->color){
        ierr = PetscViewerASCIIGetStdout(mpjac->psubcomm->comm,&sviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
        ierr = KSPView(mpjac->ksp,sviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      }
    } else {
      PetscInt n_global;
      ierr = MPI_Allreduce(&jac->n_local,&n_global,1,MPIU_INT,MPI_MAX,((PetscObject)pc)->comm);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"  Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] number of local blocks = %D, first local block number = %D\n",
                   rank,jac->n_local,jac->first_local);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
      for (i=0; i<n_global; i++) {
        ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
        if (i < jac->n_local) {
          ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] local block number %D\n",rank,i);CHKERRQ(ierr);
          ierr = KSPView(jac->ksp[i],sviewer);CHKERRQ(ierr);
          ierr = PetscViewerASCIISynchronizedPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
        }
        ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
      }
      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    ierr = PetscViewerStringSPrintf(viewer," blks=%D",jac->n);CHKERRQ(ierr);
    ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
    if (jac->ksp) {ierr = KSPView(jac->ksp[0],sviewer);CHKERRQ(ierr);}
    ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
  } else {
    SETERRQ1(((PetscObject)pc)->comm,PETSC_ERR_SUP,"Viewer type %s not supported for block Jacobi",((PetscObject)viewer)->type_name);
  }
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------------------*/

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCBJacobiSetUseTrueLocal_BJacobi"
PetscErrorCode  PCBJacobiSetUseTrueLocal_BJacobi(PC pc)
{
  PC_BJacobi   *jac;

  PetscFunctionBegin;
  jac                 = (PC_BJacobi*)pc->data;
  jac->use_true_local = PETSC_TRUE;
  PetscFunctionReturn(0);
}
EXTERN_C_END

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCBJacobiGetSubKSP_BJacobi"
PetscErrorCode  PCBJacobiGetSubKSP_BJacobi(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
{
  PC_BJacobi   *jac = (PC_BJacobi*)pc->data;

  PetscFunctionBegin;
  if (!pc->setupcalled) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Must call KSPSetUp() or PCSetUp() first");

  if (n_local)     *n_local     = jac->n_local;
  if (first_local) *first_local = jac->first_local;
  *ksp                          = jac->ksp;
  jac->same_local_solves        = PETSC_FALSE; /* Assume that local solves are now different;
                                                  not necessarily true though!  This flag is
                                                  used only for PCView_BJacobi() */
  PetscFunctionReturn(0);
}
EXTERN_C_END

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCBJacobiSetTotalBlocks_BJacobi"
PetscErrorCode  PCBJacobiSetTotalBlocks_BJacobi(PC pc,PetscInt blocks,PetscInt *lens)
{
  PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (pc->setupcalled > 0 && jac->n!=blocks) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ORDER,"Cannot alter number of blocks after PCSetUp()/KSPSetUp() has been called");
  jac->n = blocks;
  if (!lens) {
    jac->g_lens = 0;
  } else {
    ierr = PetscMalloc(blocks*sizeof(PetscInt),&jac->g_lens);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory(pc,blocks*sizeof(PetscInt));CHKERRQ(ierr);
    ierr = PetscMemcpy(jac->g_lens,lens,blocks*sizeof(PetscInt));CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
EXTERN_C_END

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCBJacobiGetTotalBlocks_BJacobi"
PetscErrorCode  PCBJacobiGetTotalBlocks_BJacobi(PC pc, PetscInt *blocks, const PetscInt *lens[])
{
  PC_BJacobi *jac = (PC_BJacobi*) pc->data;

  PetscFunctionBegin;
  *blocks = jac->n;
  if (lens) *lens = jac->g_lens;
  PetscFunctionReturn(0);
}
EXTERN_C_END

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCBJacobiSetLocalBlocks_BJacobi"
PetscErrorCode  PCBJacobiSetLocalBlocks_BJacobi(PC pc,PetscInt blocks,const PetscInt lens[])
{
  PC_BJacobi     *jac;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  jac = (PC_BJacobi*)pc->data;

  jac->n_local = blocks;
  if (!lens) {
    jac->l_lens = 0;
  } else {
    ierr = PetscMalloc(blocks*sizeof(PetscInt),&jac->l_lens);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory(pc,blocks*sizeof(PetscInt));CHKERRQ(ierr);
    ierr = PetscMemcpy(jac->l_lens,lens,blocks*sizeof(PetscInt));CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
EXTERN_C_END

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCBJacobiGetLocalBlocks_BJacobi"
PetscErrorCode  PCBJacobiGetLocalBlocks_BJacobi(PC pc, PetscInt *blocks, const PetscInt *lens[])
{
  PC_BJacobi *jac = (PC_BJacobi*) pc->data;

  PetscFunctionBegin;
  *blocks = jac->n_local;
  if (lens) *lens = jac->l_lens;
  PetscFunctionReturn(0);
}
EXTERN_C_END

/* -------------------------------------------------------------------------------------*/

#undef __FUNCT__
#define __FUNCT__ "PCBJacobiSetUseTrueLocal"
/*@
   PCBJacobiSetUseTrueLocal - Sets a flag to indicate that the block
   problem is associated with the linear system matrix instead of the
   default (where it is associated with the preconditioning matrix).
   That is, if the local system is solved iteratively then it iterates
   on the block from the linear system matrix, using the block from the
   preconditioning matrix as the preconditioner for that local block.

   Logically Collective on PC

   Input Parameter:
.  pc - the preconditioner context

   Options Database Key:
.  -pc_bjacobi_truelocal - Activates PCBJacobiSetUseTrueLocal()

   Notes:
   For the common case in which the preconditioning and linear
   system matrices are identical, this routine is unnecessary.

   Level: intermediate

.keywords:  block, Jacobi, set, true, local, flag

.seealso: PCSetOperators(), PCBJacobiSetLocalBlocks()
@*/
PetscErrorCode  PCBJacobiSetUseTrueLocal(PC pc)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCBJacobiSetUseTrueLocal_C",(PC),(pc));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
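
/*
   Example usage of PCBJacobiSetUseTrueLocal() (a minimal sketch; assumes pc
   is a PC of type PCBJACOBI and ierr is a PetscErrorCode):

     ierr = PCBJacobiSetUseTrueLocal(pc);CHKERRQ(ierr);

   or, equivalently, pass -pc_bjacobi_truelocal on the command line.
*/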

#undef __FUNCT__
#define __FUNCT__ "PCBJacobiGetSubKSP"
/*@C
   PCBJacobiGetSubKSP - Gets the local KSP contexts for all blocks on
   this processor.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  n_local - the number of blocks on this processor, or PETSC_NULL
.  first_local - the global number of the first block on this processor, or PETSC_NULL
-  ksp - the array of KSP contexts

   Notes:
   After PCBJacobiGetSubKSP() the array of KSP contexts is not to be freed.

   Currently for some matrix implementations only 1 block per processor
   is supported.

   You must call KSPSetUp() or PCSetUp() before calling PCBJacobiGetSubKSP().

   Level: advanced

.keywords:  block, Jacobi, get, sub, KSP, context

.seealso: PCBJacobiSetUseTrueLocal(), PCASMGetSubKSP()
@*/
PetscErrorCode  PCBJacobiGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscUseMethod(pc,"PCBJacobiGetSubKSP_C",(PC,PetscInt *,PetscInt *,KSP **),(pc,n_local,first_local,ksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
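
/*
   Example usage of PCBJacobiGetSubKSP() (a minimal sketch; assumes pc is a
   PCBJACOBI preconditioner on which KSPSetUp() or PCSetUp() has already been
   called, and ierr is a PetscErrorCode):

     KSP      *subksp;
     PC       subpc;
     PetscInt i,nlocal,first;

     ierr = PCBJacobiGetSubKSP(pc,&nlocal,&first,&subksp);CHKERRQ(ierr);
     for (i=0; i<nlocal; i++) {
       ierr = KSPGetPC(subksp[i],&subpc);CHKERRQ(ierr);
       ierr = PCSetType(subpc,PCILU);CHKERRQ(ierr);
     }
*/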

#undef __FUNCT__
#define __FUNCT__ "PCBJacobiSetTotalBlocks"
/*@
   PCBJacobiSetTotalBlocks - Sets the global number of blocks for the block
   Jacobi preconditioner.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  blocks - the number of blocks
-  lens - [optional] integer array containing the size of each block

   Options Database Key:
.  -pc_bjacobi_blocks <blocks> - Sets the number of global blocks

   Notes:
   Currently only a limited number of blocking configurations are supported.
   All processors sharing the PC must call this routine with the same data.

   Level: intermediate

.keywords:  set, number, Jacobi, global, total, blocks

.seealso: PCBJacobiSetUseTrueLocal(), PCBJacobiSetLocalBlocks()
@*/
PetscErrorCode  PCBJacobiSetTotalBlocks(PC pc,PetscInt blocks,const PetscInt lens[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (blocks <= 0) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have positive blocks");
  ierr = PetscTryMethod(pc,"PCBJacobiSetTotalBlocks_C",(PC,PetscInt,const PetscInt[]),(pc,blocks,lens));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
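
/*
   Example usage of PCBJacobiSetTotalBlocks() (a minimal sketch; splits the
   preconditioner into 4 blocks and lets PETSc choose the block sizes; assumes
   ierr is a PetscErrorCode):

     ierr = PCBJacobiSetTotalBlocks(pc,4,PETSC_NULL);CHKERRQ(ierr);
*/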

#undef __FUNCT__
#define __FUNCT__ "PCBJacobiGetTotalBlocks"
/*@C
   PCBJacobiGetTotalBlocks - Gets the global number of blocks for the block
   Jacobi preconditioner.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  blocks - the number of blocks
-  lens - [optional] integer array containing the size of each block

   Level: intermediate

.keywords:  get, number, Jacobi, global, total, blocks

.seealso: PCBJacobiSetUseTrueLocal(), PCBJacobiGetLocalBlocks()
@*/
PetscErrorCode  PCBJacobiGetTotalBlocks(PC pc, PetscInt *blocks, const PetscInt *lens[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID,1);
  PetscValidIntPointer(blocks,2);
  ierr = PetscUseMethod(pc,"PCBJacobiGetTotalBlocks_C",(PC,PetscInt*, const PetscInt *[]),(pc,blocks,lens));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCBJacobiSetLocalBlocks"
/*@
   PCBJacobiSetLocalBlocks - Sets the local number of blocks for the block
   Jacobi preconditioner.

   Not Collective

   Input Parameters:
+  pc - the preconditioner context
.  blocks - the number of blocks
-  lens - [optional] integer array containing size of each block

   Note:
   Currently only a limited number of blocking configurations are supported.

   Level: intermediate

.keywords: PC, set, number, Jacobi, local, blocks

.seealso: PCBJacobiSetUseTrueLocal(), PCBJacobiSetTotalBlocks()
@*/
PetscErrorCode  PCBJacobiSetLocalBlocks(PC pc,PetscInt blocks,const PetscInt lens[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (blocks < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must have nonnegative blocks");
  ierr = PetscTryMethod(pc,"PCBJacobiSetLocalBlocks_C",(PC,PetscInt,const PetscInt []),(pc,blocks,lens));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
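
/*
   Example usage of PCBJacobiSetLocalBlocks() (a minimal sketch; creates two
   local blocks whose sizes must add up to the number of local rows, here
   assumed to be 100; ierr is a PetscErrorCode):

     PetscInt lens[2] = {40,60};

     ierr = PCBJacobiSetLocalBlocks(pc,2,lens);CHKERRQ(ierr);
*/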

#undef __FUNCT__
#define __FUNCT__ "PCBJacobiGetLocalBlocks"
/*@C
   PCBJacobiGetLocalBlocks - Gets the local number of blocks for the block
   Jacobi preconditioner.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  blocks - the number of blocks
-  lens - [optional] integer array containing size of each block

   Note:
   Currently only a limited number of blocking configurations are supported.

   Level: intermediate

.keywords: PC, get, number, Jacobi, local, blocks

.seealso: PCBJacobiSetUseTrueLocal(), PCBJacobiGetTotalBlocks()
@*/
PetscErrorCode  PCBJacobiGetLocalBlocks(PC pc, PetscInt *blocks, const PetscInt *lens[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID,1);
  PetscValidIntPointer(blocks,2);
  ierr = PetscUseMethod(pc,"PCBJacobiGetLocalBlocks_C",(PC,PetscInt*, const PetscInt *[]),(pc,blocks,lens));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* -----------------------------------------------------------------------------------*/

/*MC
   PCBJACOBI - Use block Jacobi preconditioning, where each block is (approximately)
           solved with its own KSP object.

   Options Database Keys:
+  -pc_bjacobi_blocks <blocks> - Sets the number of global blocks
-  -pc_bjacobi_truelocal - Activates PCBJacobiSetUseTrueLocal()

   Notes: Each processor can have one or more blocks, but a block cannot be shared by more
     than one processor. Defaults to one block per processor.

     To set options on the solvers for all blocks, prepend -sub_ to all the KSP
        and PC options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly

     To set options on the solvers separately for each block, call PCBJacobiGetSubKSP()
         and set the options directly on the resulting KSP object (you can access its PC
         with KSPGetPC()). An example sketch follows this manual page.

   Level: beginner

   Concepts: block Jacobi

.seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
           PCASM, PCBJacobiSetUseTrueLocal(), PCBJacobiGetSubKSP(), PCBJacobiSetTotalBlocks(),
           PCBJacobiSetLocalBlocks(), PCSetModifySubmatrices()
M*/
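
/*
   Example usage of PCBJACOBI (a minimal sketch; assumes a parallel Mat A and
   Vecs b,x already exist and ierr is a PetscErrorCode; per-block solver
   options such as -sub_pc_type ilu are picked up by KSPSetFromOptions()):

     KSP ksp;
     PC  pc;

     ierr = KSPCreate(PETSC_COMM_WORLD,&ksp);CHKERRQ(ierr);
     ierr = KSPSetOperators(ksp,A,A,DIFFERENT_NONZERO_PATTERN);CHKERRQ(ierr);
     ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
     ierr = PCSetType(pc,PCBJACOBI);CHKERRQ(ierr);
     ierr = KSPSetFromOptions(ksp);CHKERRQ(ierr);
     ierr = KSPSolve(ksp,b,x);CHKERRQ(ierr);
*/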

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCCreate_BJacobi"
PetscErrorCode  PCCreate_BJacobi(PC pc)
{
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  PC_BJacobi     *jac;

  PetscFunctionBegin;
  ierr = PetscNewLog(pc,PC_BJacobi,&jac);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
  pc->ops->apply              = 0;
  pc->ops->applytranspose     = 0;
  pc->ops->setup              = PCSetUp_BJacobi;
  pc->ops->destroy            = PCDestroy_BJacobi;
  pc->ops->setfromoptions     = PCSetFromOptions_BJacobi;
  pc->ops->view               = PCView_BJacobi;
  pc->ops->applyrichardson    = 0;

  pc->data               = (void*)jac;
  jac->n                 = -1;
  jac->n_local           = -1;
  jac->first_local       = rank;
  jac->ksp               = 0;
  jac->use_true_local    = PETSC_FALSE;
  jac->same_local_solves = PETSC_TRUE;
  jac->g_lens            = 0;
  jac->l_lens            = 0;
  jac->tp_mat            = 0;
  jac->tp_pmat           = 0;
  jac->psubcomm          = 0;

  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCBJacobiSetUseTrueLocal_C",
                    "PCBJacobiSetUseTrueLocal_BJacobi",
                    PCBJacobiSetUseTrueLocal_BJacobi);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCBJacobiGetSubKSP_C","PCBJacobiGetSubKSP_BJacobi",
                    PCBJacobiGetSubKSP_BJacobi);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCBJacobiSetTotalBlocks_C","PCBJacobiSetTotalBlocks_BJacobi",
                    PCBJacobiSetTotalBlocks_BJacobi);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCBJacobiGetTotalBlocks_C","PCBJacobiGetTotalBlocks_BJacobi",
                    PCBJacobiGetTotalBlocks_BJacobi);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCBJacobiSetLocalBlocks_C","PCBJacobiSetLocalBlocks_BJacobi",
                    PCBJacobiSetLocalBlocks_BJacobi);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCBJacobiGetLocalBlocks_C","PCBJacobiGetLocalBlocks_BJacobi",
                    PCBJacobiGetLocalBlocks_BJacobi);CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
EXTERN_C_END

/* --------------------------------------------------------------------------------------------*/
/*
        These are for a single block per processor; they work for AIJ and BAIJ, both Seq and MPI
*/
#undef __FUNCT__
#define __FUNCT__ "PCReset_BJacobi_Singleblock"
PetscErrorCode PCReset_BJacobi_Singleblock(PC pc)
{
  PC_BJacobi             *jac = (PC_BJacobi*)pc->data;
  PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  /*
        If the on-processor block had to be generated via a MatGetDiagonalBlock()
     that created a copy, this frees the space
  */
  if (jac->tp_mat) {
    ierr = MatDestroy(jac->tp_mat);CHKERRQ(ierr);
  }
  if (jac->tp_pmat) {
    ierr = MatDestroy(jac->tp_pmat);CHKERRQ(ierr);
  }

  ierr = KSPReset(jac->ksp[0]);CHKERRQ(ierr);
  if (bjac->x) {ierr = VecDestroy(bjac->x);CHKERRQ(ierr);}
  if (bjac->y) {ierr = VecDestroy(bjac->y);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCDestroy_BJacobi_Singleblock"
PetscErrorCode PCDestroy_BJacobi_Singleblock(PC pc)
{
  PC_BJacobi             *jac = (PC_BJacobi*)pc->data;
  PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  ierr = PCReset_BJacobi_Singleblock(pc);CHKERRQ(ierr);
  ierr = KSPDestroy(jac->ksp[0]);CHKERRQ(ierr);
  ierr = PetscFree(jac->ksp);CHKERRQ(ierr);
  ierr = PetscFree(jac->l_lens);CHKERRQ(ierr);
  ierr = PetscFree(jac->g_lens);CHKERRQ(ierr);
  ierr = PetscFree(bjac);CHKERRQ(ierr);
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetUpOnBlocks_BJacobi_Singleblock"
PetscErrorCode PCSetUpOnBlocks_BJacobi_Singleblock(PC pc)
{
  PetscErrorCode ierr;
  PC_BJacobi     *jac = (PC_BJacobi*)pc->data;

  PetscFunctionBegin;
  ierr = KSPSetUp(jac->ksp[0]);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApply_BJacobi_Singleblock"
PetscErrorCode PCApply_BJacobi_Singleblock(PC pc,Vec x,Vec y)
{
  PetscErrorCode         ierr;
  PC_BJacobi             *jac = (PC_BJacobi*)pc->data;
  PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
  PetscScalar            *x_array,*y_array;

  PetscFunctionBegin;
  /*
      The VecPlaceArray() calls avoid copying the x and y vectors into the
    work vectors bjac->x and bjac->y. The reason for the work vectors is
    that we need sequential vectors for the sequential solve.
  */
  ierr = VecGetArray(x,&x_array);CHKERRQ(ierr);
  ierr = VecGetArray(y,&y_array);CHKERRQ(ierr);
  ierr = VecPlaceArray(bjac->x,x_array);CHKERRQ(ierr);
  ierr = VecPlaceArray(bjac->y,y_array);CHKERRQ(ierr);
  ierr = KSPSolve(jac->ksp[0],bjac->x,bjac->y);CHKERRQ(ierr);
  ierr = VecResetArray(bjac->x);CHKERRQ(ierr);
  ierr = VecResetArray(bjac->y);CHKERRQ(ierr);
  ierr = VecRestoreArray(x,&x_array);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&y_array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApplySymmetricLeft_BJacobi_Singleblock"
PetscErrorCode PCApplySymmetricLeft_BJacobi_Singleblock(PC pc,Vec x,Vec y)
{
  PetscErrorCode         ierr;
  PC_BJacobi             *jac = (PC_BJacobi*)pc->data;
  PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
  PetscScalar            *x_array,*y_array;
  PC                     subpc;

  PetscFunctionBegin;
  /*
      The VecPlaceArray() calls avoid copying the x and y vectors into the
    work vectors bjac->x and bjac->y. The reason for the work vectors is
    that we need sequential vectors for the sequential solve.
  */
  ierr = VecGetArray(x,&x_array);CHKERRQ(ierr);
  ierr = VecGetArray(y,&y_array);CHKERRQ(ierr);
  ierr = VecPlaceArray(bjac->x,x_array);CHKERRQ(ierr);
  ierr = VecPlaceArray(bjac->y,y_array);CHKERRQ(ierr);

  /* apply the symmetric left portion of the inner PC operator */
  /* note this bypasses the inner KSP and its options completely */

  ierr = KSPGetPC(jac->ksp[0],&subpc);CHKERRQ(ierr);
  ierr = PCApplySymmetricLeft(subpc,bjac->x,bjac->y);CHKERRQ(ierr);
  ierr = VecResetArray(bjac->x);CHKERRQ(ierr);
  ierr = VecResetArray(bjac->y);CHKERRQ(ierr);

  ierr = VecRestoreArray(x,&x_array);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&y_array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApplySymmetricRight_BJacobi_Singleblock"
PetscErrorCode PCApplySymmetricRight_BJacobi_Singleblock(PC pc,Vec x,Vec y)
{
  PetscErrorCode         ierr;
  PC_BJacobi             *jac = (PC_BJacobi*)pc->data;
  PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
  PetscScalar            *x_array,*y_array;
  PC                     subpc;

  PetscFunctionBegin;
  /*
      The VecPlaceArray() calls avoid copying the x and y vectors into the
    work vectors bjac->x and bjac->y. The reason for the work vectors is
    that we need sequential vectors for the sequential solve.
  */
  ierr = VecGetArray(x,&x_array);CHKERRQ(ierr);
  ierr = VecGetArray(y,&y_array);CHKERRQ(ierr);
  ierr = VecPlaceArray(bjac->x,x_array);CHKERRQ(ierr);
  ierr = VecPlaceArray(bjac->y,y_array);CHKERRQ(ierr);

  /* apply the symmetric right portion of the inner PC operator */
  /* note this bypasses the inner KSP and its options completely */

  ierr = KSPGetPC(jac->ksp[0],&subpc);CHKERRQ(ierr);
  ierr = PCApplySymmetricRight(subpc,bjac->x,bjac->y);CHKERRQ(ierr);
  /* the arrays placed above must be reset before the vectors are restored */
  ierr = VecResetArray(bjac->x);CHKERRQ(ierr);
  ierr = VecResetArray(bjac->y);CHKERRQ(ierr);

  ierr = VecRestoreArray(x,&x_array);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&y_array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApplyTranspose_BJacobi_Singleblock"
PetscErrorCode PCApplyTranspose_BJacobi_Singleblock(PC pc,Vec x,Vec y)
{
  PetscErrorCode         ierr;
  PC_BJacobi             *jac = (PC_BJacobi*)pc->data;
  PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
  PetscScalar            *x_array,*y_array;

  PetscFunctionBegin;
  /*
      The VecPlaceArray() calls avoid copying the x and y vectors into the
    work vectors bjac->x and bjac->y. The reason for the work vectors is
    that we need sequential vectors for the sequential solve.
  */
  ierr = VecGetArray(x,&x_array);CHKERRQ(ierr);
  ierr = VecGetArray(y,&y_array);CHKERRQ(ierr);
  ierr = VecPlaceArray(bjac->x,x_array);CHKERRQ(ierr);
  ierr = VecPlaceArray(bjac->y,y_array);CHKERRQ(ierr);
  ierr = KSPSolveTranspose(jac->ksp[0],bjac->x,bjac->y);CHKERRQ(ierr);
  ierr = VecResetArray(bjac->x);CHKERRQ(ierr);
  ierr = VecResetArray(bjac->y);CHKERRQ(ierr);
  ierr = VecRestoreArray(x,&x_array);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&y_array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetUp_BJacobi_Singleblock"
static PetscErrorCode PCSetUp_BJacobi_Singleblock(PC pc,Mat mat,Mat pmat)
{
  PC_BJacobi             *jac = (PC_BJacobi*)pc->data;
  PetscErrorCode         ierr;
  PetscInt               m;
  KSP                    ksp;
  Vec                    x,y;
  PC_BJacobi_Singleblock *bjac;
  PetscBool              wasSetup;

  PetscFunctionBegin;

  /* set default direct solver with no Krylov method */
  if (!pc->setupcalled) {
    const char *prefix;
    wasSetup = PETSC_FALSE;
    ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
    ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(pc,ksp);CHKERRQ(ierr);
    ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
    ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
    /*
      The reason we need to generate these vectors is to serve
      as the right-hand side and solution vector for the solve on the
      block. We do not need to allocate space for the vectors since
      that is provided via VecPlaceArray() just before the call to
      KSPSolve() on the block.
    */
    ierr = MatGetSize(pmat,&m,&m);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,m,PETSC_NULL,&x);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,m,PETSC_NULL,&y);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(pc,x);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(pc,y);CHKERRQ(ierr);

    pc->ops->reset               = PCReset_BJacobi_Singleblock;
    pc->ops->destroy             = PCDestroy_BJacobi_Singleblock;
    pc->ops->apply               = PCApply_BJacobi_Singleblock;
    pc->ops->applysymmetricleft  = PCApplySymmetricLeft_BJacobi_Singleblock;
    pc->ops->applysymmetricright = PCApplySymmetricRight_BJacobi_Singleblock;
    pc->ops->applytranspose      = PCApplyTranspose_BJacobi_Singleblock;
    pc->ops->setuponblocks       = PCSetUpOnBlocks_BJacobi_Singleblock;

    ierr = PetscNewLog(pc,PC_BJacobi_Singleblock,&bjac);CHKERRQ(ierr);
    bjac->x = x;
    bjac->y = y;

    ierr = PetscMalloc(sizeof(KSP),&jac->ksp);CHKERRQ(ierr);
    jac->ksp[0] = ksp;
    jac->data   = (void*)bjac;
  } else {
    wasSetup = PETSC_TRUE;
    ksp      = jac->ksp[0];
    bjac     = (PC_BJacobi_Singleblock*)jac->data;
  }
  if (jac->use_true_local) {
    ierr = KSPSetOperators(ksp,mat,pmat,pc->flag);CHKERRQ(ierr);
  } else {
    ierr = KSPSetOperators(ksp,pmat,pmat,pc->flag);CHKERRQ(ierr);
  }
  if (!wasSetup && pc->setfromoptionscalled) {
    ierr = KSPSetFromOptions(ksp);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/* ---------------------------------------------------------------------------------------------*/
#undef __FUNCT__
#define __FUNCT__ "PCReset_BJacobi_Multiblock"
PetscErrorCode PCReset_BJacobi_Multiblock(PC pc)
{
  PC_BJacobi            *jac = (PC_BJacobi*)pc->data;
  PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
  PetscErrorCode        ierr;
  PetscInt              i;

  PetscFunctionBegin;
  if (bjac && bjac->pmat) {
    ierr = MatDestroyMatrices(jac->n_local,&bjac->pmat);CHKERRQ(ierr);
    if (jac->use_true_local) {
      ierr = MatDestroyMatrices(jac->n_local,&bjac->mat);CHKERRQ(ierr);
    }
  }

  /*
        If the on-processor block had to be generated via a MatGetDiagonalBlock()
     that created a copy, this frees the space
  */
  if (jac->tp_mat) {
    ierr = MatDestroy(jac->tp_mat);CHKERRQ(ierr);
  }
  if (jac->tp_pmat) {
    ierr = MatDestroy(jac->tp_pmat);CHKERRQ(ierr);
  }

  for (i=0; i<jac->n_local; i++) {
    ierr = KSPReset(jac->ksp[i]);CHKERRQ(ierr);
    if (bjac && bjac->x) {
      ierr = VecDestroy(bjac->x[i]);CHKERRQ(ierr);
      ierr = VecDestroy(bjac->y[i]);CHKERRQ(ierr);
      ierr = ISDestroy(bjac->is[i]);CHKERRQ(ierr);
    }
  }
  if (bjac) {
    ierr = PetscFree2(bjac->x,bjac->y);CHKERRQ(ierr);
    ierr = PetscFree(bjac->starts);CHKERRQ(ierr);
    ierr = PetscFree(bjac->is);CHKERRQ(ierr);
  }
  ierr = PetscFree(jac->data);CHKERRQ(ierr);
  ierr = PetscFree(jac->l_lens);CHKERRQ(ierr);
  ierr = PetscFree(jac->g_lens);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCDestroy_BJacobi_Multiblock"
PetscErrorCode PCDestroy_BJacobi_Multiblock(PC pc)
{
  PC_BJacobi            *jac = (PC_BJacobi*)pc->data;
  PetscErrorCode        ierr;
  PetscInt              i;

  PetscFunctionBegin;
  ierr = PCReset_BJacobi_Multiblock(pc);CHKERRQ(ierr);
  for (i=0; i<jac->n_local; i++) {
    ierr = KSPDestroy(jac->ksp[i]);CHKERRQ(ierr);
  }
  ierr = PetscFree(jac->ksp);CHKERRQ(ierr);
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetUpOnBlocks_BJacobi_Multiblock"
PetscErrorCode PCSetUpOnBlocks_BJacobi_Multiblock(PC pc)
{
  PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local = jac->n_local;

  PetscFunctionBegin;
  for (i=0; i<n_local; i++) {
    ierr = KSPSetUp(jac->ksp[i]);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*
      Preconditioner application for block Jacobi
*/
#undef __FUNCT__
#define __FUNCT__ "PCApply_BJacobi_Multiblock"
PetscErrorCode PCApply_BJacobi_Multiblock(PC pc,Vec x,Vec y)
{
  PC_BJacobi            *jac = (PC_BJacobi*)pc->data;
  PetscErrorCode        ierr;
  PetscInt              i,n_local = jac->n_local;
  PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
  PetscScalar           *xin,*yin;

  PetscFunctionBegin;
  ierr = VecGetArray(x,&xin);CHKERRQ(ierr);
  ierr = VecGetArray(y,&yin);CHKERRQ(ierr);
  for (i=0; i<n_local; i++) {
    /*
       To avoid copying the subvector from x into a workspace we instead
       make the workspace vector array point to the subpart of the array of
       the global vector.
    */
    ierr = VecPlaceArray(bjac->x[i],xin+bjac->starts[i]);CHKERRQ(ierr);
    ierr = VecPlaceArray(bjac->y[i],yin+bjac->starts[i]);CHKERRQ(ierr);

    ierr = PetscLogEventBegin(PC_ApplyOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr);
    ierr = KSPSolve(jac->ksp[i],bjac->x[i],bjac->y[i]);CHKERRQ(ierr);
    ierr = PetscLogEventEnd(PC_ApplyOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr);

    ierr = VecResetArray(bjac->x[i]);CHKERRQ(ierr);
    ierr = VecResetArray(bjac->y[i]);CHKERRQ(ierr);
  }
  ierr = VecRestoreArray(x,&xin);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&yin);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*
      Preconditioner transpose application for block Jacobi
*/
#undef __FUNCT__
#define __FUNCT__ "PCApplyTranspose_BJacobi_Multiblock"
PetscErrorCode PCApplyTranspose_BJacobi_Multiblock(PC pc,Vec x,Vec y)
{
  PC_BJacobi            *jac = (PC_BJacobi*)pc->data;
  PetscErrorCode        ierr;
  PetscInt              i,n_local = jac->n_local;
  PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
  PetscScalar           *xin,*yin;

  PetscFunctionBegin;
  ierr = VecGetArray(x,&xin);CHKERRQ(ierr);
  ierr = VecGetArray(y,&yin);CHKERRQ(ierr);
  for (i=0; i<n_local; i++) {
    /*
       To avoid copying the subvector from x into a workspace we instead
       make the workspace vector array point to the subpart of the array of
       the global vector.
    */
    ierr = VecPlaceArray(bjac->x[i],xin+bjac->starts[i]);CHKERRQ(ierr);
    ierr = VecPlaceArray(bjac->y[i],yin+bjac->starts[i]);CHKERRQ(ierr);

    ierr = PetscLogEventBegin(PC_ApplyTransposeOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr);
    ierr = KSPSolveTranspose(jac->ksp[i],bjac->x[i],bjac->y[i]);CHKERRQ(ierr);
    ierr = PetscLogEventEnd(PC_ApplyTransposeOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr);

    ierr = VecResetArray(bjac->x[i]);CHKERRQ(ierr);
    ierr = VecResetArray(bjac->y[i]);CHKERRQ(ierr);
  }
  ierr = VecRestoreArray(x,&xin);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&yin);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetUp_BJacobi_Multiblock"
static PetscErrorCode PCSetUp_BJacobi_Multiblock(PC pc,Mat mat,Mat pmat)
{
  PC_BJacobi             *jac = (PC_BJacobi*)pc->data;
  PetscErrorCode         ierr;
  PetscInt               m,n_local,N,M,start,i;
  const char             *prefix,*pprefix,*mprefix;
  KSP                    ksp;
  Vec                    x,y;
  PC_BJacobi_Multiblock  *bjac = (PC_BJacobi_Multiblock*)jac->data;
  PC                     subpc;
  IS                     is;
  MatReuse               scall = MAT_REUSE_MATRIX;

  PetscFunctionBegin;
  ierr = MatGetLocalSize(pc->pmat,&M,&N);CHKERRQ(ierr);

  n_local = jac->n_local;

  if (jac->use_true_local) {
    PetscBool  same;
    ierr = PetscTypeCompare((PetscObject)mat,((PetscObject)pmat)->type_name,&same);CHKERRQ(ierr);
    if (!same) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_INCOMP,"Matrices not of same type");
  }

  if (!pc->setupcalled) {
    scall                   = MAT_INITIAL_MATRIX;
    pc->ops->reset          = PCReset_BJacobi_Multiblock;
    pc->ops->destroy        = PCDestroy_BJacobi_Multiblock;
    pc->ops->apply          = PCApply_BJacobi_Multiblock;
    pc->ops->applytranspose = PCApplyTranspose_BJacobi_Multiblock;
    pc->ops->setuponblocks  = PCSetUpOnBlocks_BJacobi_Multiblock;

    ierr = PetscNewLog(pc,PC_BJacobi_Multiblock,&bjac);CHKERRQ(ierr);
    ierr = PetscMalloc(n_local*sizeof(KSP),&jac->ksp);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory(pc,n_local*sizeof(KSP));CHKERRQ(ierr);
    ierr = PetscMalloc2(n_local,Vec,&bjac->x,n_local,Vec,&bjac->y);CHKERRQ(ierr);
    ierr = PetscMalloc(n_local*sizeof(PetscInt),&bjac->starts);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory(pc,n_local*sizeof(PetscInt));CHKERRQ(ierr);

    jac->data = (void*)bjac;
    ierr = PetscMalloc(n_local*sizeof(IS),&bjac->is);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory(pc,n_local*sizeof(IS));CHKERRQ(ierr);

    start = 0;
    for (i=0; i<n_local; i++) {
      ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
      ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
      ierr = PetscLogObjectParent(pc,ksp);CHKERRQ(ierr);
      ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
      ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
      ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
      ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
      ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);

      m = jac->l_lens[i];

      /*
      The reason we need to generate these vectors is to serve
      as the right-hand side and solution vector for the solve on the
      block. We do not need to allocate space for the vectors since
      that is provided via VecPlaceArray() just before the call to
      KSPSolve() on the block.
      */
      ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,m,PETSC_NULL,&x);CHKERRQ(ierr);
      ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,m,PETSC_NULL,&y);CHKERRQ(ierr);
      ierr = PetscLogObjectParent(pc,x);CHKERRQ(ierr);
      ierr = PetscLogObjectParent(pc,y);CHKERRQ(ierr);
      bjac->x[i]      = x;
      bjac->y[i]      = y;
      bjac->starts[i] = start;
      jac->ksp[i]     = ksp;

      ierr = ISCreateStride(PETSC_COMM_SELF,m,start,1,&is);CHKERRQ(ierr);
      bjac->is[i] = is;
      ierr = PetscLogObjectParent(pc,is);CHKERRQ(ierr);

      start += m;
    }
  } else {
    bjac = (PC_BJacobi_Multiblock*)jac->data;
    /*
       Destroy the blocks from the previous iteration
    */
    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      ierr = MatDestroyMatrices(n_local,&bjac->pmat);CHKERRQ(ierr);
      if (jac->use_true_local) {
        ierr = MatDestroyMatrices(n_local,&bjac->mat);CHKERRQ(ierr);
      }
      scall = MAT_INITIAL_MATRIX;
    }
  }

  ierr = MatGetSubMatrices(pmat,n_local,bjac->is,bjac->is,scall,&bjac->pmat);CHKERRQ(ierr);
  if (jac->use_true_local) {
    ierr = PetscObjectGetOptionsPrefix((PetscObject)mat,&mprefix);CHKERRQ(ierr);
    ierr = MatGetSubMatrices(mat,n_local,bjac->is,bjac->is,scall,&bjac->mat);CHKERRQ(ierr);
  }
  /* Return control to the user so that the submatrices can be modified (e.g., to apply
     different boundary conditions for the submatrices than for the global problem) */
  ierr = PCModifySubMatrices(pc,n_local,bjac->is,bjac->is,bjac->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);

  ierr = PetscObjectGetOptionsPrefix((PetscObject)pmat,&pprefix);CHKERRQ(ierr);
  for (i=0; i<n_local; i++) {
    ierr = PetscLogObjectParent(pc,bjac->pmat[i]);CHKERRQ(ierr);
    ierr = PetscObjectSetOptionsPrefix((PetscObject)bjac->pmat[i],pprefix);CHKERRQ(ierr);
    if (jac->use_true_local) {
      ierr = PetscLogObjectParent(pc,bjac->mat[i]);CHKERRQ(ierr);
      ierr = PetscObjectSetOptionsPrefix((PetscObject)bjac->mat[i],mprefix);CHKERRQ(ierr);
      ierr = KSPSetOperators(jac->ksp[i],bjac->mat[i],bjac->pmat[i],pc->flag);CHKERRQ(ierr);
    } else {
      ierr = KSPSetOperators(jac->ksp[i],bjac->pmat[i],bjac->pmat[i],pc->flag);CHKERRQ(ierr);
    }
    if (pc->setfromoptionscalled) {
      ierr = KSPSetFromOptions(jac->ksp[i]);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}

/* ---------------------------------------------------------------------------------------------*/
/*
      These are for a single block with multiple processes
*/
#undef __FUNCT__
#define __FUNCT__ "PCReset_BJacobi_Multiproc"
static PetscErrorCode PCReset_BJacobi_Multiproc(PC pc)
{
  PC_BJacobi           *jac = (PC_BJacobi*)pc->data;
  PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
  PetscErrorCode       ierr;

  PetscFunctionBegin;
  if (mpjac->ysub){ierr = VecDestroy(mpjac->ysub);CHKERRQ(ierr);}
  if (mpjac->xsub){ierr = VecDestroy(mpjac->xsub);CHKERRQ(ierr);}
  if (mpjac->submats){ierr = MatDestroy(mpjac->submats);CHKERRQ(ierr);}
  if (mpjac->ksp){ierr = KSPReset(mpjac->ksp);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCDestroy_BJacobi_Multiproc"
static PetscErrorCode PCDestroy_BJacobi_Multiproc(PC pc)
{
  PC_BJacobi           *jac = (PC_BJacobi*)pc->data;
  PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
  PetscErrorCode       ierr;

  PetscFunctionBegin;
  if (mpjac->ksp){ierr = KSPDestroy(mpjac->ksp);CHKERRQ(ierr);}
  if (mpjac->psubcomm){ierr = PetscSubcommDestroy(mpjac->psubcomm);CHKERRQ(ierr);}

  ierr = PetscFree(mpjac);CHKERRQ(ierr);
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApply_BJacobi_Multiproc"
static PetscErrorCode PCApply_BJacobi_Multiproc(PC pc,Vec x,Vec y)
{
  PC_BJacobi           *jac = (PC_BJacobi*)pc->data;
  PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
  PetscErrorCode       ierr;
  PetscScalar          *xarray,*yarray;

  PetscFunctionBegin;
  /* place x's and y's local arrays into xsub and ysub */
  ierr = VecGetArray(x,&xarray);CHKERRQ(ierr);
  ierr = VecGetArray(y,&yarray);CHKERRQ(ierr);
  ierr = VecPlaceArray(mpjac->xsub,xarray);CHKERRQ(ierr);
  ierr = VecPlaceArray(mpjac->ysub,yarray);CHKERRQ(ierr);

  /* apply the preconditioner on the single multi-process matrix block */
  ierr = KSPSolve(mpjac->ksp,mpjac->xsub,mpjac->ysub);CHKERRQ(ierr);

  ierr = VecResetArray(mpjac->xsub);CHKERRQ(ierr);
  ierr = VecResetArray(mpjac->ysub);CHKERRQ(ierr);
  ierr = VecRestoreArray(x,&xarray);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&yarray);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

extern PetscErrorCode MatGetMultiProcBlock_MPIAIJ(Mat,MPI_Comm,Mat*);
#undef __FUNCT__
#define __FUNCT__ "PCSetUp_BJacobi_Multiproc"
static PetscErrorCode PCSetUp_BJacobi_Multiproc(PC pc)
{
  PC_BJacobi           *jac = (PC_BJacobi*)pc->data;
  PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
  PetscErrorCode       ierr;
  PetscInt             m,n;
  MPI_Comm             comm = ((PetscObject)pc)->comm,subcomm=0;
  const char           *prefix;

  PetscFunctionBegin;
  if (jac->n_local > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Only a single block in a subcommunicator is supported");
  jac->n_local = 1; /* currently only a single block is supported for a subcommunicator */
  if (!pc->setupcalled) {
    ierr = PetscNewLog(pc,PC_BJacobi_Multiproc,&mpjac);CHKERRQ(ierr);
    jac->data = (void*)mpjac;

    /* initialize datastructure mpjac */
    if (!jac->psubcomm) {
      /* create default contiguous subcommunicators if the user does not provide them */
      ierr = PetscSubcommCreate(comm,&jac->psubcomm);CHKERRQ(ierr);
      ierr = PetscSubcommSetNumber(jac->psubcomm,jac->n);CHKERRQ(ierr);
      ierr = PetscSubcommSetType(jac->psubcomm,PETSC_SUBCOMM_CONTIGUOUS);CHKERRQ(ierr);
      ierr = PetscLogObjectMemory(pc,sizeof(PetscSubcomm));CHKERRQ(ierr);
    }
    mpjac->psubcomm = jac->psubcomm;
    subcomm         = mpjac->psubcomm->comm;

    /* get the matrix block of pmat that lives on this subcommunicator */
    ierr = MatGetMultiProcBlock_MPIAIJ(pc->pmat,subcomm,&mpjac->submats);CHKERRQ(ierr);

    /* create a new KSP of which the processes in each subcomm share a copy */
    ierr = KSPCreate(subcomm,&mpjac->ksp);CHKERRQ(ierr);
    ierr = PetscObjectIncrementTabLevel((PetscObject)mpjac->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(pc,mpjac->ksp);CHKERRQ(ierr);
    ierr = KSPSetOperators(mpjac->ksp,mpjac->submats,mpjac->submats,pc->flag);CHKERRQ(ierr);
    ierr = KSPGetPC(mpjac->ksp,&mpjac->pc);CHKERRQ(ierr);

    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    ierr = KSPSetOptionsPrefix(mpjac->ksp,prefix);CHKERRQ(ierr);
    ierr = KSPAppendOptionsPrefix(mpjac->ksp,"sub_");CHKERRQ(ierr);
    /*
      PetscMPIInt rank,subsize,subrank;
      ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
      ierr = MPI_Comm_size(subcomm,&subsize);CHKERRQ(ierr);
      ierr = MPI_Comm_rank(subcomm,&subrank);CHKERRQ(ierr);

      ierr = MatGetLocalSize(mpjac->submats,&m,PETSC_NULL);CHKERRQ(ierr);
      ierr = MatGetSize(mpjac->submats,&n,PETSC_NULL);CHKERRQ(ierr);
      ierr = PetscSynchronizedPrintf(comm,"[%d], sub-size %d,sub-rank %d\n",rank,subsize,subrank);
      ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
    */

    /* create dummy vectors xsub and ysub */
    ierr = MatGetLocalSize(mpjac->submats,&m,&n);CHKERRQ(ierr);
    ierr = VecCreateMPIWithArray(subcomm,n,PETSC_DECIDE,PETSC_NULL,&mpjac->xsub);CHKERRQ(ierr);
    ierr = VecCreateMPIWithArray(subcomm,m,PETSC_DECIDE,PETSC_NULL,&mpjac->ysub);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(pc,mpjac->xsub);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(pc,mpjac->ysub);CHKERRQ(ierr);

    pc->ops->reset   = PCReset_BJacobi_Multiproc;
    pc->ops->destroy = PCDestroy_BJacobi_Multiproc;
    pc->ops->apply   = PCApply_BJacobi_Multiproc;
  }

  if (pc->setupcalled && pc->flag == DIFFERENT_NONZERO_PATTERN) {
    /* destroy old matrix blocks, then get new matrix blocks */
    if (mpjac->submats) {
      ierr = MatDestroy(mpjac->submats);CHKERRQ(ierr);
      subcomm = mpjac->psubcomm->comm;
      ierr = MatGetMultiProcBlock_MPIAIJ(pc->pmat,subcomm,&mpjac->submats);CHKERRQ(ierr);
      ierr = KSPSetOperators(mpjac->ksp,mpjac->submats,mpjac->submats,DIFFERENT_NONZERO_PATTERN);CHKERRQ(ierr);
    }
  }

  if (pc->setfromoptionscalled) {
    ierr = KSPSetFromOptions(mpjac->ksp);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}