xref: /petsc/src/ksp/pc/impls/bjacobi/bjacobi.c (revision c5b2ea42cd4070151795b0df3c8c47a133418b2b)
1 
2 /*
3    Defines a block Jacobi preconditioner.
4 */
5 #include <private/pcimpl.h>              /*I "petscpc.h" I*/
6 #include <../src/ksp/pc/impls/bjacobi/bjacobi.h>
7 
8 static PetscErrorCode PCSetUp_BJacobi_Singleblock(PC,Mat,Mat);
9 static PetscErrorCode PCSetUp_BJacobi_Multiblock(PC,Mat,Mat);
10 static PetscErrorCode PCSetUp_BJacobi_Multiproc(PC);
11 
12 #undef __FUNCT__
13 #define __FUNCT__ "PCSetUp_BJacobi"
14 static PetscErrorCode PCSetUp_BJacobi(PC pc)
15 {
16   PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
17   Mat            mat = pc->mat,pmat = pc->pmat;
18   PetscErrorCode ierr,(*f)(Mat,PetscBool *,MatReuse,Mat*);
19   PetscInt       N,M,start,i,sum,end;
20   PetscInt       bs,i_start=-1,i_end=-1;
21   PetscMPIInt    rank,size;
22   const char     *pprefix,*mprefix;
23 
24   PetscFunctionBegin;
25   ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
26   ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
27   ierr = MatGetLocalSize(pc->pmat,&M,&N);CHKERRQ(ierr);
28   ierr = MatGetBlockSize(pc->pmat,&bs);CHKERRQ(ierr);
29 
30   if (jac->n > 0 && jac->n < size){
31     ierr = PCSetUp_BJacobi_Multiproc(pc);CHKERRQ(ierr);
32     PetscFunctionReturn(0);
33   }
34 
35   /* --------------------------------------------------------------------------
36       Determines the number of blocks assigned to each processor
37   -----------------------------------------------------------------------------*/
38 
39   /*   local block count  given */
40   if (jac->n_local > 0 && jac->n < 0) {
41     ierr = MPI_Allreduce(&jac->n_local,&jac->n,1,MPIU_INT,MPI_SUM,((PetscObject)pc)->comm);CHKERRQ(ierr);
42     if (jac->l_lens) { /* check that user set these correctly */
43       sum = 0;
44       for (i=0; i<jac->n_local; i++) {
45         if (jac->l_lens[i]/bs*bs !=jac->l_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat blocksize doesn't match block Jacobi layout");
46         sum += jac->l_lens[i];
47       }
48       if (sum != M) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local lens set incorrectly");
49     } else {
50       ierr = PetscMalloc(jac->n_local*sizeof(PetscInt),&jac->l_lens);CHKERRQ(ierr);
51       for (i=0; i<jac->n_local; i++) {
52         jac->l_lens[i] = bs*((M/bs)/jac->n_local + (((M/bs) % jac->n_local) > i));
53       }
54     }
55   } else if (jac->n > 0 && jac->n_local < 0) { /* global block count given */
56     /* global blocks given: determine which ones are local */
57     if (jac->g_lens) {
      /* check that the entries of g_lens are valid */
59       for (i=0; i<jac->n; i++) {
60         if (!jac->g_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Zero block not allowed");
61         if (jac->g_lens[i]/bs*bs != jac->g_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat blocksize doesn't match block Jacobi layout");
62       }
63       if (size == 1) {
64         jac->n_local = jac->n;
65         ierr         = PetscMalloc(jac->n_local*sizeof(PetscInt),&jac->l_lens);CHKERRQ(ierr);
66         ierr         = PetscMemcpy(jac->l_lens,jac->g_lens,jac->n_local*sizeof(PetscInt));CHKERRQ(ierr);
67         /* check that user set these correctly */
68         sum = 0;
69         for (i=0; i<jac->n_local; i++) sum += jac->l_lens[i];
70         if (sum != M) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Global lens set incorrectly");
71       } else {
72         ierr = MatGetOwnershipRange(pc->pmat,&start,&end);CHKERRQ(ierr);
        /* loop over blocks, determining the first one owned by this process */
74         sum = 0;
75         for (i=0; i<jac->n+1; i++) {
76           if (sum == start) { i_start = i; goto start_1;}
77           if (i < jac->n) sum += jac->g_lens[i];
78         }
79         SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Block sizes used in PCBJacobiSetTotalBlocks()\nare not compatible with parallel matrix layout");
80  start_1:
81         for (i=i_start; i<jac->n+1; i++) {
82           if (sum == end) { i_end = i; goto end_1; }
83           if (i < jac->n) sum += jac->g_lens[i];
84         }
85         SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Block sizes used in PCBJacobiSetTotalBlocks()\nare not compatible with parallel matrix layout");
86  end_1:
87         jac->n_local = i_end - i_start;
88         ierr         = PetscMalloc(jac->n_local*sizeof(PetscInt),&jac->l_lens);CHKERRQ(ierr);
89         ierr         = PetscMemcpy(jac->l_lens,jac->g_lens+i_start,jac->n_local*sizeof(PetscInt));CHKERRQ(ierr);
90       }
    } else { /* no global blocks given, determine them using the default layout */
92       jac->n_local = jac->n/size + ((jac->n % size) > rank);
93       ierr         = PetscMalloc(jac->n_local*sizeof(PetscInt),&jac->l_lens);CHKERRQ(ierr);
94       for (i=0; i<jac->n_local; i++) {
95         jac->l_lens[i] = ((M/bs)/jac->n_local + (((M/bs) % jac->n_local) > i))*bs;
96         if (!jac->l_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Too many blocks given");
97       }
98     }
99   } else if (jac->n < 0 && jac->n_local < 0) { /* no blocks given */
100     jac->n         = size;
101     jac->n_local   = 1;
102     ierr           = PetscMalloc(sizeof(PetscInt),&jac->l_lens);CHKERRQ(ierr);
103     jac->l_lens[0] = M;
104   }
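  /*
      For example, with the default layout above and M = 10 local rows, bs = 1, and
      jac->n_local = 3, the block lengths come out as l_lens = {4,3,3}: each length is a
      multiple of bs and the lengths sum to M.
  */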
105   if (jac->n_local < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Number of blocks is less than number of processors");
106 
107   /* -------------------------
108       Determines mat and pmat
109   ---------------------------*/
110   ierr = PetscObjectQueryFunction((PetscObject)pc->mat,"MatGetDiagonalBlock_C",(void (**)(void))&f);CHKERRQ(ierr);
111   if (size == 1 && !f) {
112     mat  = pc->mat;
113     pmat = pc->pmat;
114   } else {
115     PetscBool  iscopy;
116     MatReuse   scall;
117 
118     if (jac->use_true_local) {
119       /* use block from true matrix, not preconditioner matrix for local MatMult() */
120       scall = MAT_INITIAL_MATRIX;
121       if (pc->setupcalled) {
122         if (pc->flag == SAME_NONZERO_PATTERN) {
123           if (jac->tp_mat) {
124             scall = MAT_REUSE_MATRIX;
125             mat   = jac->tp_mat;
126           }
127         } else {
128           if (jac->tp_mat)  {
129             ierr = MatDestroy(jac->tp_mat);CHKERRQ(ierr);
130           }
131         }
132       }
133       if (!f) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_SUP,"This matrix does not support getting diagonal block");
134       ierr = (*f)(pc->mat,&iscopy,scall,&mat);CHKERRQ(ierr);
135       /* make submatrix have same prefix as entire matrix */
136       ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->mat,&mprefix);CHKERRQ(ierr);
137       ierr = PetscObjectSetOptionsPrefix((PetscObject)mat,mprefix);CHKERRQ(ierr);
138       if (iscopy) {
139         jac->tp_mat = mat;
140       }
141     }
142     if (pc->pmat != pc->mat || !jac->use_true_local) {
143       scall = MAT_INITIAL_MATRIX;
144       if (pc->setupcalled) {
145         if (pc->flag == SAME_NONZERO_PATTERN) {
146           if (jac->tp_pmat) {
147             scall = MAT_REUSE_MATRIX;
148             pmat   = jac->tp_pmat;
149           }
150         } else {
151           if (jac->tp_pmat)  {
152             ierr = MatDestroy(jac->tp_pmat);CHKERRQ(ierr);
153           }
154         }
155       }
156       ierr = PetscObjectQueryFunction((PetscObject)pc->pmat,"MatGetDiagonalBlock_C",(void (**)(void))&f);CHKERRQ(ierr);
157       if (!f) {
158         const char *type;
159         ierr = PetscObjectGetType((PetscObject) pc->pmat,&type);CHKERRQ(ierr);
160         SETERRQ1(((PetscObject)pc)->comm,PETSC_ERR_SUP,"This matrix type, %s, does not support getting diagonal block", type);
161       }
162       ierr = (*f)(pc->pmat,&iscopy,scall,&pmat);CHKERRQ(ierr);
163       /* make submatrix have same prefix as entire matrix */
164       ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
165       ierr = PetscObjectSetOptionsPrefix((PetscObject)pmat,pprefix);CHKERRQ(ierr);
166       if (iscopy) {
167         jac->tp_pmat = pmat;
168       }
169     } else {
170       pmat = mat;
171     }
172   }
173 
174   /* ------
175      Setup code depends on the number of blocks
176   */
177   if (jac->n_local == 1) {
178     ierr = PCSetUp_BJacobi_Singleblock(pc,mat,pmat);CHKERRQ(ierr);
179   } else {
180     ierr = PCSetUp_BJacobi_Multiblock(pc,mat,pmat);CHKERRQ(ierr);
181   }
182   PetscFunctionReturn(0);
183 }
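/*
    Usage sketch (an illustration; ./app stands for any PETSc program that calls
    KSPSetFromOptions()): on 8 MPI processes

       mpiexec -n 8 ./app -pc_type bjacobi                        uses the default of one block per process
       mpiexec -n 8 ./app -pc_type bjacobi -pc_bjacobi_blocks 2   requests fewer blocks than processes, so
                                                                  PCSetUp_BJacobi() dispatches to the
                                                                  multiproc path above
*/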
184 
/* Default destroy, if it has never been set up */
186 #undef __FUNCT__
187 #define __FUNCT__ "PCDestroy_BJacobi"
188 static PetscErrorCode PCDestroy_BJacobi(PC pc)
189 {
190   PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
191   PetscErrorCode ierr;
192 
193   PetscFunctionBegin;
194   ierr = PetscFree(jac->g_lens);CHKERRQ(ierr);
195   ierr = PetscFree(jac->l_lens);CHKERRQ(ierr);
196   ierr = PetscFree(pc->data);CHKERRQ(ierr);
197   PetscFunctionReturn(0);
198 }
199 
200 #undef __FUNCT__
201 #define __FUNCT__ "PCSetFromOptions_BJacobi"
202 
203 static PetscErrorCode PCSetFromOptions_BJacobi(PC pc)
204 {
205   PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
206   PetscErrorCode ierr;
207   PetscInt       blocks;
208   PetscBool      flg;
209 
210   PetscFunctionBegin;
211   ierr = PetscOptionsHead("Block Jacobi options");CHKERRQ(ierr);
212     ierr = PetscOptionsInt("-pc_bjacobi_blocks","Total number of blocks","PCBJacobiSetTotalBlocks",jac->n,&blocks,&flg);CHKERRQ(ierr);
213     if (flg) {
214       ierr = PCBJacobiSetTotalBlocks(pc,blocks,PETSC_NULL);CHKERRQ(ierr);
215     }
216     flg  = PETSC_FALSE;
217     ierr = PetscOptionsBool("-pc_bjacobi_truelocal","Use the true matrix, not preconditioner matrix to define matrix vector product in sub-problems","PCBJacobiSetUseTrueLocal",flg,&flg,PETSC_NULL);CHKERRQ(ierr);
218     if (flg) {
219       ierr = PCBJacobiSetUseTrueLocal(pc);CHKERRQ(ierr);
220     }
221   ierr = PetscOptionsTail();CHKERRQ(ierr);
222   PetscFunctionReturn(0);
223 }
224 
225 #undef __FUNCT__
226 #define __FUNCT__ "PCView_BJacobi"
227 static PetscErrorCode PCView_BJacobi(PC pc,PetscViewer viewer)
228 {
229   PC_BJacobi           *jac = (PC_BJacobi*)pc->data;
230   PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
231   PetscErrorCode       ierr;
232   PetscMPIInt          rank;
233   PetscInt             i;
234   PetscBool            iascii,isstring;
235   PetscViewer          sviewer;
236 
237   PetscFunctionBegin;
238   ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
239   ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
240   if (iascii) {
241     if (jac->use_true_local) {
242       ierr = PetscViewerASCIIPrintf(viewer,"  block Jacobi: using true local matrix, number of blocks = %D\n",jac->n);CHKERRQ(ierr);
243     }
244     ierr = PetscViewerASCIIPrintf(viewer,"  block Jacobi: number of blocks = %D\n",jac->n);CHKERRQ(ierr);
245     ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
246     if (jac->same_local_solves) {
247       ierr = PetscViewerASCIIPrintf(viewer,"  Local solve is same for all blocks, in the following KSP and PC objects:\n");CHKERRQ(ierr);
248       if (jac->ksp) {
249         ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
250         if (!rank){
251           ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
252           ierr = KSPView(jac->ksp[0],sviewer);CHKERRQ(ierr);
253           ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
254         }
255         ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
256       } else if (jac->psubcomm && !jac->psubcomm->color){
257         ierr = PetscViewerASCIIGetStdout(mpjac->psubcomm->comm,&sviewer);CHKERRQ(ierr);
258         ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
259         ierr = KSPView(mpjac->ksp,sviewer);CHKERRQ(ierr);
260         ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
261       }
262     } else {
263       PetscInt n_global;
264       ierr = MPI_Allreduce(&jac->n_local,&n_global,1,MPIU_INT,MPI_MAX,((PetscObject)pc)->comm);CHKERRQ(ierr);
265       ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
266       ierr = PetscViewerASCIIPrintf(viewer,"  Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr);
267       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] number of local blocks = %D, first local block number = %D\n",
268                    rank,jac->n_local,jac->first_local);CHKERRQ(ierr);
269       ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
270       for (i=0; i<n_global; i++) {
271         ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
272         if (i < jac->n_local) {
273           ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] local block number %D\n",rank,i);CHKERRQ(ierr);
274           ierr = KSPView(jac->ksp[i],sviewer);CHKERRQ(ierr);
275           ierr = PetscViewerASCIISynchronizedPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
276         }
277         ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
278       }
279       ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
280       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
281       ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
282     }
283   } else if (isstring) {
284     ierr = PetscViewerStringSPrintf(viewer," blks=%D",jac->n);CHKERRQ(ierr);
285     ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
286     if (jac->ksp) {ierr = KSPView(jac->ksp[0],sviewer);CHKERRQ(ierr);}
287     ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
288   } else {
289     SETERRQ1(((PetscObject)pc)->comm,PETSC_ERR_SUP,"Viewer type %s not supported for block Jacobi",((PetscObject)viewer)->type_name);
290   }
291   PetscFunctionReturn(0);
292 }
293 
294 /* -------------------------------------------------------------------------------------*/
295 
296 EXTERN_C_BEGIN
297 #undef __FUNCT__
298 #define __FUNCT__ "PCBJacobiSetUseTrueLocal_BJacobi"
299 PetscErrorCode  PCBJacobiSetUseTrueLocal_BJacobi(PC pc)
300 {
301   PC_BJacobi   *jac;
302 
303   PetscFunctionBegin;
304   jac                 = (PC_BJacobi*)pc->data;
305   jac->use_true_local = PETSC_TRUE;
306   PetscFunctionReturn(0);
307 }
308 EXTERN_C_END
309 
310 EXTERN_C_BEGIN
311 #undef __FUNCT__
312 #define __FUNCT__ "PCBJacobiGetSubKSP_BJacobi"
313 PetscErrorCode  PCBJacobiGetSubKSP_BJacobi(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
314 {
  PC_BJacobi   *jac = (PC_BJacobi*)pc->data;
316 
317   PetscFunctionBegin;
318   if (!pc->setupcalled) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Must call KSPSetUp() or PCSetUp() first");
319 
320   if (n_local)     *n_local     = jac->n_local;
321   if (first_local) *first_local = jac->first_local;
322   *ksp                          = jac->ksp;
323   jac->same_local_solves        = PETSC_FALSE; /* Assume that local solves are now different;
324                                                   not necessarily true though!  This flag is
325                                                   used only for PCView_BJacobi() */
326   PetscFunctionReturn(0);
327 }
328 EXTERN_C_END
329 
330 EXTERN_C_BEGIN
331 #undef __FUNCT__
332 #define __FUNCT__ "PCBJacobiSetTotalBlocks_BJacobi"
333 PetscErrorCode  PCBJacobiSetTotalBlocks_BJacobi(PC pc,PetscInt blocks,PetscInt *lens)
334 {
335   PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
336   PetscErrorCode ierr;
337 
338   PetscFunctionBegin;
339 
340   if (pc->setupcalled > 0 && jac->n!=blocks) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ORDER,"Cannot alter number of blocks after PCSetUp()/KSPSetUp() has been called");
341   jac->n = blocks;
342   if (!lens) {
343     jac->g_lens = 0;
344   } else {
345     ierr = PetscMalloc(blocks*sizeof(PetscInt),&jac->g_lens);CHKERRQ(ierr);
346     ierr = PetscLogObjectMemory(pc,blocks*sizeof(PetscInt));CHKERRQ(ierr);
347     ierr = PetscMemcpy(jac->g_lens,lens,blocks*sizeof(PetscInt));CHKERRQ(ierr);
348   }
349   PetscFunctionReturn(0);
350 }
351 EXTERN_C_END
352 
353 EXTERN_C_BEGIN
354 #undef __FUNCT__
355 #define __FUNCT__ "PCBJacobiGetTotalBlocks_BJacobi"
356 PetscErrorCode  PCBJacobiGetTotalBlocks_BJacobi(PC pc, PetscInt *blocks, const PetscInt *lens[])
357 {
358   PC_BJacobi *jac = (PC_BJacobi*) pc->data;
359 
360   PetscFunctionBegin;
361   *blocks = jac->n;
362   if (lens) *lens = jac->g_lens;
363   PetscFunctionReturn(0);
364 }
365 EXTERN_C_END
366 
367 EXTERN_C_BEGIN
368 #undef __FUNCT__
369 #define __FUNCT__ "PCBJacobiSetLocalBlocks_BJacobi"
370 PetscErrorCode  PCBJacobiSetLocalBlocks_BJacobi(PC pc,PetscInt blocks,const PetscInt lens[])
371 {
372   PC_BJacobi     *jac;
373   PetscErrorCode ierr;
374 
375   PetscFunctionBegin;
376   jac = (PC_BJacobi*)pc->data;
377 
378   jac->n_local = blocks;
379   if (!lens) {
380     jac->l_lens = 0;
381   } else {
382     ierr = PetscMalloc(blocks*sizeof(PetscInt),&jac->l_lens);CHKERRQ(ierr);
383     ierr = PetscLogObjectMemory(pc,blocks*sizeof(PetscInt));CHKERRQ(ierr);
384     ierr = PetscMemcpy(jac->l_lens,lens,blocks*sizeof(PetscInt));CHKERRQ(ierr);
385   }
386   PetscFunctionReturn(0);
387 }
388 EXTERN_C_END
389 
390 EXTERN_C_BEGIN
391 #undef __FUNCT__
392 #define __FUNCT__ "PCBJacobiGetLocalBlocks_BJacobi"
393 PetscErrorCode  PCBJacobiGetLocalBlocks_BJacobi(PC pc, PetscInt *blocks, const PetscInt *lens[])
394 {
395   PC_BJacobi *jac = (PC_BJacobi*) pc->data;
396 
397   PetscFunctionBegin;
398   *blocks = jac->n_local;
399   if (lens) *lens = jac->l_lens;
400   PetscFunctionReturn(0);
401 }
402 EXTERN_C_END
403 
404 /* -------------------------------------------------------------------------------------*/
405 
406 #undef __FUNCT__
407 #define __FUNCT__ "PCBJacobiSetUseTrueLocal"
408 /*@
409    PCBJacobiSetUseTrueLocal - Sets a flag to indicate that the block
410    problem is associated with the linear system matrix instead of the
411    default (where it is associated with the preconditioning matrix).
   That is, if the local block is solved iteratively, the iteration uses the
   block of the system matrix as the operator, with the corresponding block of
   the preconditioning matrix used to build the local preconditioner.
415 
416    Logically Collective on PC
417 
418    Input Parameters:
419 .  pc - the preconditioner context
420 
421    Options Database Key:
422 .  -pc_bjacobi_truelocal - Activates PCBJacobiSetUseTrueLocal()
423 
424    Notes:
425    For the common case in which the preconditioning and linear
426    system matrices are identical, this routine is unnecessary.
427 
428    Level: intermediate
429 
430 .keywords:  block, Jacobi, set, true, local, flag
431 
432 .seealso: PCSetOperators(), PCBJacobiSetLocalBlocks()
433 @*/
434 PetscErrorCode  PCBJacobiSetUseTrueLocal(PC pc)
435 {
436   PetscErrorCode ierr;
437 
438   PetscFunctionBegin;
439   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
440   ierr = PetscTryMethod(pc,"PCBJacobiSetUseTrueLocal_C",(PC),(pc));CHKERRQ(ierr);
441   PetscFunctionReturn(0);
442 }
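/*
   Usage sketch for PCBJacobiSetUseTrueLocal() (an illustration; ksp, A, and P are assumed
   to be an existing KSP and its operator/preconditioning matrices):

     PC pc;
     ierr = KSPSetOperators(ksp,A,P,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
     ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
     ierr = PCSetType(pc,PCBJACOBI);CHKERRQ(ierr);
     ierr = PCBJacobiSetUseTrueLocal(pc);CHKERRQ(ierr);

   The local solves then iterate on the diagonal block of A, preconditioned by the
   corresponding block of P. The same effect is obtained with -pc_bjacobi_truelocal.
*/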
443 
444 #undef __FUNCT__
445 #define __FUNCT__ "PCBJacobiGetSubKSP"
446 /*@C
447    PCBJacobiGetSubKSP - Gets the local KSP contexts for all blocks on
448    this processor.
449 
   Not Collective
451 
452    Input Parameter:
453 .  pc - the preconditioner context
454 
455    Output Parameters:
456 +  n_local - the number of blocks on this processor, or PETSC_NULL
457 .  first_local - the global number of the first block on this processor, or PETSC_NULL
458 -  ksp - the array of KSP contexts
459 
460    Notes:
   The array of KSP contexts returned by PCBJacobiGetSubKSP() is owned by the PC and must not be freed by the caller.
462 
463    Currently for some matrix implementations only 1 block per processor
464    is supported.
465 
466    You must call KSPSetUp() or PCSetUp() before calling PCBJacobiGetSubKSP().
467 
468    Level: advanced
469 
470 .keywords:  block, Jacobi, get, sub, KSP, context
471 
.seealso: PCBJacobiSetTotalBlocks(), PCASMGetSubKSP()
473 @*/
474 PetscErrorCode  PCBJacobiGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
475 {
476   PetscErrorCode ierr;
477 
478   PetscFunctionBegin;
479   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
480   ierr = PetscUseMethod(pc,"PCBJacobiGetSubKSP_C",(PC,PetscInt *,PetscInt *,KSP **),(pc,n_local,first_local,ksp));CHKERRQ(ierr);
481   PetscFunctionReturn(0);
482 }
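/*
   Usage sketch for PCBJacobiGetSubKSP() (an illustration; ksp is an assumed KSP that
   uses PCBJACOBI and has already been given its operators):

     KSP      *subksp;
     PC       pc,subpc;
     PetscInt i,nlocal,first;

     ierr = KSPSetUp(ksp);CHKERRQ(ierr);
     ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
     ierr = PCBJacobiGetSubKSP(pc,&nlocal,&first,&subksp);CHKERRQ(ierr);
     for (i=0; i<nlocal; i++) {
       ierr = KSPGetPC(subksp[i],&subpc);CHKERRQ(ierr);
       ierr = PCSetType(subpc,PCILU);CHKERRQ(ierr);
       ierr = KSPSetTolerances(subksp[i],1.e-6,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT);CHKERRQ(ierr);
     }

   The subksp array is owned by the PC and must not be freed by the caller.
*/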
483 
484 #undef __FUNCT__
485 #define __FUNCT__ "PCBJacobiSetTotalBlocks"
486 /*@
487    PCBJacobiSetTotalBlocks - Sets the global number of blocks for the block
488    Jacobi preconditioner.
489 
490    Collective on PC
491 
492    Input Parameters:
493 +  pc - the preconditioner context
494 .  blocks - the number of blocks
495 -  lens - [optional] integer array containing the size of each block
496 
497    Options Database Key:
498 .  -pc_bjacobi_blocks <blocks> - Sets the number of global blocks
499 
500    Notes:
501    Currently only a limited number of blocking configurations are supported.
502    All processors sharing the PC must call this routine with the same data.
503 
504    Level: intermediate
505 
506 .keywords:  set, number, Jacobi, global, total, blocks
507 
508 .seealso: PCBJacobiSetUseTrueLocal(), PCBJacobiSetLocalBlocks()
509 @*/
510 PetscErrorCode  PCBJacobiSetTotalBlocks(PC pc,PetscInt blocks,const PetscInt lens[])
511 {
512   PetscErrorCode ierr;
513 
514   PetscFunctionBegin;
515   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
516   if (blocks <= 0) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have positive blocks");
517   ierr = PetscTryMethod(pc,"PCBJacobiSetTotalBlocks_C",(PC,PetscInt,const PetscInt[]),(pc,blocks,lens));CHKERRQ(ierr);
518   PetscFunctionReturn(0);
519 }
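/*
   Usage sketch for PCBJacobiSetTotalBlocks() (an illustration; pc is an assumed PCBJACOBI
   preconditioner for a 120x120 system):

     PetscInt lens[4] = {30,30,30,30};
     ierr = PCBJacobiSetTotalBlocks(pc,4,lens);CHKERRQ(ierr);

   or, to let PETSc choose the block sizes,

     ierr = PCBJacobiSetTotalBlocks(pc,4,PETSC_NULL);CHKERRQ(ierr);

   which is equivalent to the command line option -pc_bjacobi_blocks 4.
*/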
520 
521 #undef __FUNCT__
522 #define __FUNCT__ "PCBJacobiGetTotalBlocks"
523 /*@C
524    PCBJacobiGetTotalBlocks - Gets the global number of blocks for the block
525    Jacobi preconditioner.
526 
527    Not Collective
528 
529    Input Parameter:
530 .  pc - the preconditioner context
531 
   Output Parameters:
533 +  blocks - the number of blocks
534 -  lens - integer array containing the size of each block
535 
536    Level: intermediate
537 
538 .keywords:  get, number, Jacobi, global, total, blocks
539 
540 .seealso: PCBJacobiSetUseTrueLocal(), PCBJacobiGetLocalBlocks()
541 @*/
542 PetscErrorCode  PCBJacobiGetTotalBlocks(PC pc, PetscInt *blocks, const PetscInt *lens[])
543 {
544   PetscErrorCode ierr;
545 
546   PetscFunctionBegin;
547   PetscValidHeaderSpecific(pc, PC_CLASSID,1);
548   PetscValidIntPointer(blocks,2);
549   ierr = PetscUseMethod(pc,"PCBJacobiGetTotalBlocks_C",(PC,PetscInt*, const PetscInt *[]),(pc,blocks,lens));CHKERRQ(ierr);
550   PetscFunctionReturn(0);
551 }
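/*
   Usage sketch for PCBJacobiGetTotalBlocks() (an illustration; pc is an assumed PCBJACOBI PC):

     PetscInt       nblocks;
     const PetscInt *lens;
     ierr = PCBJacobiGetTotalBlocks(pc,&nblocks,&lens);CHKERRQ(ierr);

   lens is PETSC_NULL unless block lengths were provided via PCBJacobiSetTotalBlocks();
   the array is owned by the PC and must not be freed.
*/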
552 
553 #undef __FUNCT__
554 #define __FUNCT__ "PCBJacobiSetLocalBlocks"
555 /*@
556    PCBJacobiSetLocalBlocks - Sets the local number of blocks for the block
557    Jacobi preconditioner.
558 
559    Not Collective
560 
561    Input Parameters:
562 +  pc - the preconditioner context
563 .  blocks - the number of blocks
564 -  lens - [optional] integer array containing size of each block
565 
566    Note:
567    Currently only a limited number of blocking configurations are supported.
568 
569    Level: intermediate
570 
571 .keywords: PC, set, number, Jacobi, local, blocks
572 
573 .seealso: PCBJacobiSetUseTrueLocal(), PCBJacobiSetTotalBlocks()
574 @*/
575 PetscErrorCode  PCBJacobiSetLocalBlocks(PC pc,PetscInt blocks,const PetscInt lens[])
576 {
577   PetscErrorCode ierr;
578 
579   PetscFunctionBegin;
580   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (blocks < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must have nonnegative blocks");
582   ierr = PetscTryMethod(pc,"PCBJacobiSetLocalBlocks_C",(PC,PetscInt,const PetscInt []),(pc,blocks,lens));CHKERRQ(ierr);
583   PetscFunctionReturn(0);
584 }
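/*
   Usage sketch for PCBJacobiSetLocalBlocks() (an illustration; pc is an assumed PCBJACOBI
   PC and the calling process owns 100 rows):

     PetscInt lens[2] = {60,40};
     ierr = PCBJacobiSetLocalBlocks(pc,2,lens);CHKERRQ(ierr);

   Each process may request a different number of blocks; the lengths given on a process
   must sum to its number of local rows and be compatible with the matrix block size.
*/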
585 
586 #undef __FUNCT__
587 #define __FUNCT__ "PCBJacobiGetLocalBlocks"
588 /*@C
589    PCBJacobiGetLocalBlocks - Gets the local number of blocks for the block
590    Jacobi preconditioner.
591 
592    Not Collective
593 
   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  blocks - the number of blocks
-  lens - [optional] integer array containing the size of each block
598 
599    Note:
600    Currently only a limited number of blocking configurations are supported.
601 
602    Level: intermediate
603 
604 .keywords: PC, get, number, Jacobi, local, blocks
605 
606 .seealso: PCBJacobiSetUseTrueLocal(), PCBJacobiGetTotalBlocks()
607 @*/
608 PetscErrorCode  PCBJacobiGetLocalBlocks(PC pc, PetscInt *blocks, const PetscInt *lens[])
609 {
610   PetscErrorCode ierr;
611 
612   PetscFunctionBegin;
613   PetscValidHeaderSpecific(pc, PC_CLASSID,1);
614   PetscValidIntPointer(blocks,2);
615   ierr = PetscUseMethod(pc,"PCBJacobiGetLocalBlocks_C",(PC,PetscInt*, const PetscInt *[]),(pc,blocks,lens));CHKERRQ(ierr);
616   PetscFunctionReturn(0);
617 }
618 
619 /* -----------------------------------------------------------------------------------*/
620 
621 /*MC
622    PCBJACOBI - Use block Jacobi preconditioning, each block is (approximately) solved with
623            its own KSP object.
624 
625    Options Database Keys:
+  -pc_bjacobi_blocks <n> - Sets the total number of blocks (see PCBJacobiSetTotalBlocks())
-  -pc_bjacobi_truelocal - Activates PCBJacobiSetUseTrueLocal()
627 
628    Notes: Each processor can have one or more blocks, but a block cannot be shared by more
629      than one processor. Defaults to one block per processor.
630 
     To set options on the solvers for all blocks, prefix the usual KSP and PC options
        database keys with -sub_. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly

     To set options on the solvers separately for each block, call PCBJacobiGetSubKSP()
         and set the options directly on the resulting KSP objects (you can access each
         block's PC with KSPGetPC())
637 
638    Level: beginner
639 
640    Concepts: block Jacobi
641 
642 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
643            PCASM, PCBJacobiSetUseTrueLocal(), PCBJacobiGetSubKSP(), PCBJacobiSetTotalBlocks(),
644            PCBJacobiSetLocalBlocks(), PCSetModifySubmatrices()
645 M*/
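/*
   Example command line usage (an illustration; ./app stands for any PETSc program that
   calls KSPSetFromOptions()):

     mpiexec -n 4 ./app -ksp_type gmres -pc_type bjacobi \
        -sub_ksp_type preonly -sub_pc_type ilu -sub_pc_factor_levels 1

   This runs GMRES with block Jacobi preconditioning, one block per process, each block
   solved by a single application of ILU(1).
*/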
646 
647 EXTERN_C_BEGIN
648 #undef __FUNCT__
649 #define __FUNCT__ "PCCreate_BJacobi"
650 PetscErrorCode  PCCreate_BJacobi(PC pc)
651 {
652   PetscErrorCode ierr;
653   PetscMPIInt    rank;
654   PC_BJacobi     *jac;
655 
656   PetscFunctionBegin;
657   ierr = PetscNewLog(pc,PC_BJacobi,&jac);CHKERRQ(ierr);
658   ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
659   pc->ops->apply              = 0;
660   pc->ops->applytranspose     = 0;
661   pc->ops->setup              = PCSetUp_BJacobi;
662   pc->ops->destroy            = PCDestroy_BJacobi;
663   pc->ops->setfromoptions     = PCSetFromOptions_BJacobi;
664   pc->ops->view               = PCView_BJacobi;
665   pc->ops->applyrichardson    = 0;
666 
667   pc->data               = (void*)jac;
668   jac->n                 = -1;
669   jac->n_local           = -1;
670   jac->first_local       = rank;
671   jac->ksp               = 0;
672   jac->use_true_local    = PETSC_FALSE;
673   jac->same_local_solves = PETSC_TRUE;
674   jac->g_lens            = 0;
675   jac->l_lens            = 0;
676   jac->tp_mat            = 0;
677   jac->tp_pmat           = 0;
678   jac->psubcomm          = 0;
679 
680   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCBJacobiSetUseTrueLocal_C",
681                     "PCBJacobiSetUseTrueLocal_BJacobi",
682                     PCBJacobiSetUseTrueLocal_BJacobi);CHKERRQ(ierr);
683   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCBJacobiGetSubKSP_C","PCBJacobiGetSubKSP_BJacobi",
684                     PCBJacobiGetSubKSP_BJacobi);CHKERRQ(ierr);
685   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCBJacobiSetTotalBlocks_C","PCBJacobiSetTotalBlocks_BJacobi",
686                     PCBJacobiSetTotalBlocks_BJacobi);CHKERRQ(ierr);
687   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCBJacobiGetTotalBlocks_C","PCBJacobiGetTotalBlocks_BJacobi",
688                     PCBJacobiGetTotalBlocks_BJacobi);CHKERRQ(ierr);
689   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCBJacobiSetLocalBlocks_C","PCBJacobiSetLocalBlocks_BJacobi",
690                     PCBJacobiSetLocalBlocks_BJacobi);CHKERRQ(ierr);
691   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCBJacobiGetLocalBlocks_C","PCBJacobiGetLocalBlocks_BJacobi",
692                     PCBJacobiGetLocalBlocks_BJacobi);CHKERRQ(ierr);
693 
694   PetscFunctionReturn(0);
695 }
696 EXTERN_C_END
697 
698 /* --------------------------------------------------------------------------------------------*/
699 /*
        These routines are for a single block per processor; they work for AIJ and BAIJ matrices, both Seq and MPI
701 */
702 #undef __FUNCT__
703 #define __FUNCT__ "PCReset_BJacobi_Singleblock"
704 PetscErrorCode PCReset_BJacobi_Singleblock(PC pc)
705 {
706   PC_BJacobi             *jac = (PC_BJacobi*)pc->data;
707   PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
708   PetscErrorCode         ierr;
709 
710   PetscFunctionBegin;
711   /*
        If the on-processor block had to be generated via MatGetDiagonalBlock()
     and a copy was created, this frees that space
714   */
715   if (jac->tp_mat) {
716     ierr = MatDestroy(jac->tp_mat);CHKERRQ(ierr);
717   }
718   if (jac->tp_pmat) {
719     ierr = MatDestroy(jac->tp_pmat);CHKERRQ(ierr);
720   }
721 
722   ierr = KSPReset(jac->ksp[0]);CHKERRQ(ierr);
723   if (bjac->x) {ierr = VecDestroy(bjac->x);CHKERRQ(ierr);}
724   if (bjac->y) {ierr = VecDestroy(bjac->y);CHKERRQ(ierr);}
725   PetscFunctionReturn(0);
726 }
727 
728 #undef __FUNCT__
729 #define __FUNCT__ "PCDestroy_BJacobi_Singleblock"
730 PetscErrorCode PCDestroy_BJacobi_Singleblock(PC pc)
731 {
732   PC_BJacobi             *jac = (PC_BJacobi*)pc->data;
733   PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
734   PetscErrorCode         ierr;
735 
736   PetscFunctionBegin;
737   ierr = PCReset_BJacobi_Singleblock(pc);CHKERRQ(ierr);
738   ierr = KSPDestroy(jac->ksp[0]);CHKERRQ(ierr);
739   ierr = PetscFree(jac->ksp);CHKERRQ(ierr);
740   ierr = PetscFree(jac->l_lens);CHKERRQ(ierr);
741   ierr = PetscFree(jac->g_lens);CHKERRQ(ierr);
742   ierr = PetscFree(bjac);CHKERRQ(ierr);
743   ierr = PetscFree(pc->data);CHKERRQ(ierr);
744   PetscFunctionReturn(0);
745 }
746 
747 #undef __FUNCT__
748 #define __FUNCT__ "PCSetUpOnBlocks_BJacobi_Singleblock"
749 PetscErrorCode PCSetUpOnBlocks_BJacobi_Singleblock(PC pc)
750 {
751   PetscErrorCode ierr;
752   PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
753 
754   PetscFunctionBegin;
755   ierr = KSPSetUp(jac->ksp[0]);CHKERRQ(ierr);
756   PetscFunctionReturn(0);
757 }
758 
759 #undef __FUNCT__
760 #define __FUNCT__ "PCApply_BJacobi_Singleblock"
761 PetscErrorCode PCApply_BJacobi_Singleblock(PC pc,Vec x,Vec y)
762 {
763   PetscErrorCode         ierr;
764   PC_BJacobi             *jac = (PC_BJacobi*)pc->data;
765   PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
766   PetscScalar            *x_array,*y_array;
767 
768   PetscFunctionBegin;
769   /*
      The VecPlaceArray() calls avoid copying the contents of x and y
    into the work vectors bjac->x and bjac->y. These work vectors are
    needed because the block solve requires sequential vectors.
774   */
775   ierr = VecGetArray(x,&x_array);CHKERRQ(ierr);
776   ierr = VecGetArray(y,&y_array);CHKERRQ(ierr);
777   ierr = VecPlaceArray(bjac->x,x_array);CHKERRQ(ierr);
778   ierr = VecPlaceArray(bjac->y,y_array);CHKERRQ(ierr);
779   ierr = KSPSolve(jac->ksp[0],bjac->x,bjac->y);CHKERRQ(ierr);
780   ierr = VecResetArray(bjac->x);CHKERRQ(ierr);
781   ierr = VecResetArray(bjac->y);CHKERRQ(ierr);
782   ierr = VecRestoreArray(x,&x_array);CHKERRQ(ierr);
783   ierr = VecRestoreArray(y,&y_array);CHKERRQ(ierr);
784   PetscFunctionReturn(0);
785 }
786 
787 #undef __FUNCT__
788 #define __FUNCT__ "PCApplySymmetricLeft_BJacobi_Singleblock"
789 PetscErrorCode PCApplySymmetricLeft_BJacobi_Singleblock(PC pc,Vec x,Vec y)
790 {
791   PetscErrorCode         ierr;
792   PC_BJacobi             *jac = (PC_BJacobi*)pc->data;
793   PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
794   PetscScalar            *x_array,*y_array;
795   PC                     subpc;
796 
797   PetscFunctionBegin;
798   /*
      The VecPlaceArray() calls avoid copying the contents of x and y
    into the work vectors bjac->x and bjac->y. These work vectors are
    needed because the block solve requires sequential vectors.
803   */
804   ierr = VecGetArray(x,&x_array);CHKERRQ(ierr);
805   ierr = VecGetArray(y,&y_array);CHKERRQ(ierr);
806   ierr = VecPlaceArray(bjac->x,x_array);CHKERRQ(ierr);
807   ierr = VecPlaceArray(bjac->y,y_array);CHKERRQ(ierr);
808 
809   /* apply the symmetric left portion of the inner PC operator */
810   /* note this by-passes the inner KSP and its options completely */
811 
812   ierr = KSPGetPC(jac->ksp[0],&subpc);CHKERRQ(ierr);
813   ierr = PCApplySymmetricLeft(subpc,bjac->x,bjac->y);CHKERRQ(ierr);
814   ierr = VecResetArray(bjac->x);CHKERRQ(ierr);
815   ierr = VecResetArray(bjac->y);CHKERRQ(ierr);
816 
817   ierr = VecRestoreArray(x,&x_array);CHKERRQ(ierr);
818   ierr = VecRestoreArray(y,&y_array);CHKERRQ(ierr);
819   PetscFunctionReturn(0);
820 }
821 
822 #undef __FUNCT__
823 #define __FUNCT__ "PCApplySymmetricRight_BJacobi_Singleblock"
824 PetscErrorCode PCApplySymmetricRight_BJacobi_Singleblock(PC pc,Vec x,Vec y)
825 {
826   PetscErrorCode         ierr;
827   PC_BJacobi             *jac = (PC_BJacobi*)pc->data;
828   PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
829   PetscScalar            *x_array,*y_array;
830   PC                     subpc;
831 
832   PetscFunctionBegin;
833   /*
      The VecPlaceArray() calls avoid copying the contents of x and y
    into the work vectors bjac->x and bjac->y. These work vectors are
    needed because the block solve requires sequential vectors.
838   */
839   ierr = VecGetArray(x,&x_array);CHKERRQ(ierr);
840   ierr = VecGetArray(y,&y_array);CHKERRQ(ierr);
841   ierr = VecPlaceArray(bjac->x,x_array);CHKERRQ(ierr);
842   ierr = VecPlaceArray(bjac->y,y_array);CHKERRQ(ierr);
843 
844   /* apply the symmetric right portion of the inner PC operator */
845   /* note this by-passes the inner KSP and its options completely */
846 
847   ierr = KSPGetPC(jac->ksp[0],&subpc);CHKERRQ(ierr);
  ierr = PCApplySymmetricRight(subpc,bjac->x,bjac->y);CHKERRQ(ierr);
  ierr = VecResetArray(bjac->x);CHKERRQ(ierr);
  ierr = VecResetArray(bjac->y);CHKERRQ(ierr);

  ierr = VecRestoreArray(x,&x_array);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&y_array);CHKERRQ(ierr);
852   PetscFunctionReturn(0);
853 }
854 
855 #undef __FUNCT__
856 #define __FUNCT__ "PCApplyTranspose_BJacobi_Singleblock"
857 PetscErrorCode PCApplyTranspose_BJacobi_Singleblock(PC pc,Vec x,Vec y)
858 {
859   PetscErrorCode         ierr;
860   PC_BJacobi             *jac = (PC_BJacobi*)pc->data;
861   PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
862   PetscScalar            *x_array,*y_array;
863 
864   PetscFunctionBegin;
865   /*
      The VecPlaceArray() calls avoid copying the contents of x and y
    into the work vectors bjac->x and bjac->y. These work vectors are
    needed because the block solve requires sequential vectors.
870   */
871   ierr = VecGetArray(x,&x_array);CHKERRQ(ierr);
872   ierr = VecGetArray(y,&y_array);CHKERRQ(ierr);
873   ierr = VecPlaceArray(bjac->x,x_array);CHKERRQ(ierr);
874   ierr = VecPlaceArray(bjac->y,y_array);CHKERRQ(ierr);
875   ierr = KSPSolveTranspose(jac->ksp[0],bjac->x,bjac->y);CHKERRQ(ierr);
876   ierr = VecResetArray(bjac->x);CHKERRQ(ierr);
877   ierr = VecResetArray(bjac->y);CHKERRQ(ierr);
878   ierr = VecRestoreArray(x,&x_array);CHKERRQ(ierr);
879   ierr = VecRestoreArray(y,&y_array);CHKERRQ(ierr);
880   PetscFunctionReturn(0);
881 }
882 
883 #undef __FUNCT__
884 #define __FUNCT__ "PCSetUp_BJacobi_Singleblock"
885 static PetscErrorCode PCSetUp_BJacobi_Singleblock(PC pc,Mat mat,Mat pmat)
886 {
887   PC_BJacobi             *jac = (PC_BJacobi*)pc->data;
888   PetscErrorCode         ierr;
889   PetscInt               m;
890   KSP                    ksp;
891   Vec                    x,y;
892   PC_BJacobi_Singleblock *bjac;
893   PetscBool              wasSetup;
894 
895   PetscFunctionBegin;
896 
897   /* set default direct solver with no Krylov method */
898   if (!pc->setupcalled) {
899     const char *prefix;
900     wasSetup = PETSC_FALSE;
901     ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
902     ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
903     ierr = PetscLogObjectParent(pc,ksp);CHKERRQ(ierr);
904     ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
905     ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
906     ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
907     ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
908     /*
909       The reason we need to generate these vectors is to serve
910       as the right-hand side and solution vector for the solve on the
911       block. We do not need to allocate space for the vectors since
912       that is provided via VecPlaceArray() just before the call to
913       KSPSolve() on the block.
914     */
915     ierr = MatGetSize(pmat,&m,&m);CHKERRQ(ierr);
916     ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,m,PETSC_NULL,&x);CHKERRQ(ierr);
917     ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,m,PETSC_NULL,&y);CHKERRQ(ierr);
918     ierr = PetscLogObjectParent(pc,x);CHKERRQ(ierr);
919     ierr = PetscLogObjectParent(pc,y);CHKERRQ(ierr);
920 
921     pc->ops->reset               = PCReset_BJacobi_Singleblock;
922     pc->ops->destroy             = PCDestroy_BJacobi_Singleblock;
923     pc->ops->apply               = PCApply_BJacobi_Singleblock;
924     pc->ops->applysymmetricleft  = PCApplySymmetricLeft_BJacobi_Singleblock;
925     pc->ops->applysymmetricright = PCApplySymmetricRight_BJacobi_Singleblock;
926     pc->ops->applytranspose      = PCApplyTranspose_BJacobi_Singleblock;
927     pc->ops->setuponblocks       = PCSetUpOnBlocks_BJacobi_Singleblock;
928 
929     ierr = PetscNewLog(pc,PC_BJacobi_Singleblock,&bjac);CHKERRQ(ierr);
930     bjac->x      = x;
931     bjac->y      = y;
932 
933     ierr = PetscMalloc(sizeof(KSP),&jac->ksp);CHKERRQ(ierr);
934     jac->ksp[0] = ksp;
935     jac->data    = (void*)bjac;
936   } else {
937     wasSetup = PETSC_TRUE;
938     ksp = jac->ksp[0];
939     bjac = (PC_BJacobi_Singleblock *)jac->data;
940   }
941   if (jac->use_true_local) {
942     ierr = KSPSetOperators(ksp,mat,pmat,pc->flag);CHKERRQ(ierr);
943   }  else {
944     ierr = KSPSetOperators(ksp,pmat,pmat,pc->flag);CHKERRQ(ierr);
945   }
946   if (!wasSetup && pc->setfromoptionscalled) {
947     ierr = KSPSetFromOptions(ksp);CHKERRQ(ierr);
948   }
949   PetscFunctionReturn(0);
950 }
951 
952 /* ---------------------------------------------------------------------------------------------*/
953 #undef __FUNCT__
954 #define __FUNCT__ "PCReset_BJacobi_Multiblock"
955 PetscErrorCode PCReset_BJacobi_Multiblock(PC pc)
956 {
957   PC_BJacobi            *jac = (PC_BJacobi*)pc->data;
958   PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
959   PetscErrorCode        ierr;
960   PetscInt              i;
961 
962   PetscFunctionBegin;
963   if (bjac && bjac->pmat) {
964     ierr = MatDestroyMatrices(jac->n_local,&bjac->pmat);CHKERRQ(ierr);
965     if (jac->use_true_local) {
966       ierr = MatDestroyMatrices(jac->n_local,&bjac->mat);CHKERRQ(ierr);
967     }
968   }
969 
970   /*
        If the on-processor block had to be generated via MatGetDiagonalBlock()
     and a copy was created, this frees that space
973   */
974   if (jac->tp_mat) {
975     ierr = MatDestroy(jac->tp_mat);CHKERRQ(ierr);
976   }
977   if (jac->tp_pmat) {
978     ierr = MatDestroy(jac->tp_pmat);CHKERRQ(ierr);
979   }
980 
981   for (i=0; i<jac->n_local; i++) {
982     ierr = KSPReset(jac->ksp[i]);CHKERRQ(ierr);
983     if (bjac && bjac->x) {
984       ierr = VecDestroy(bjac->x[i]);CHKERRQ(ierr);
985       ierr = VecDestroy(bjac->y[i]);CHKERRQ(ierr);
986       ierr = ISDestroy(bjac->is[i]);CHKERRQ(ierr);
987     }
988   }
989   if (bjac) {
990     ierr = PetscFree2(bjac->x,bjac->y);CHKERRQ(ierr);
991     ierr = PetscFree(bjac->starts);CHKERRQ(ierr);
992     ierr = PetscFree(bjac->is);CHKERRQ(ierr);
993   }
994   ierr = PetscFree(jac->data);CHKERRQ(ierr);
995   ierr = PetscFree(jac->l_lens);CHKERRQ(ierr);
996   ierr = PetscFree(jac->g_lens);CHKERRQ(ierr);
997   PetscFunctionReturn(0);
998 }
999 
1000 #undef __FUNCT__
1001 #define __FUNCT__ "PCDestroy_BJacobi_Multiblock"
1002 PetscErrorCode PCDestroy_BJacobi_Multiblock(PC pc)
1003 {
1004   PC_BJacobi            *jac = (PC_BJacobi*)pc->data;
1005   PetscErrorCode        ierr;
1006   PetscInt              i;
1007 
1008   PetscFunctionBegin;
1009   ierr = PCReset_BJacobi_Multiblock(pc);CHKERRQ(ierr);
1010   for (i=0; i<jac->n_local; i++) {
1011     ierr = KSPDestroy(jac->ksp[i]);CHKERRQ(ierr);
1012   }
1013   ierr = PetscFree(jac->ksp);CHKERRQ(ierr);
1014   ierr = PetscFree(pc->data);CHKERRQ(ierr);
1015   PetscFunctionReturn(0);
1016 }
1017 
1018 #undef __FUNCT__
1019 #define __FUNCT__ "PCSetUpOnBlocks_BJacobi_Multiblock"
1020 PetscErrorCode PCSetUpOnBlocks_BJacobi_Multiblock(PC pc)
1021 {
1022   PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
1023   PetscErrorCode ierr;
1024   PetscInt       i,n_local = jac->n_local;
1025 
1026   PetscFunctionBegin;
1027   for (i=0; i<n_local; i++) {
1028     ierr = KSPSetUp(jac->ksp[i]);CHKERRQ(ierr);
1029   }
1030   PetscFunctionReturn(0);
1031 }
1032 
1033 /*
      Apply the block Jacobi preconditioner (multiple blocks per process)
1035 */
1036 #undef __FUNCT__
1037 #define __FUNCT__ "PCApply_BJacobi_Multiblock"
1038 PetscErrorCode PCApply_BJacobi_Multiblock(PC pc,Vec x,Vec y)
1039 {
1040   PC_BJacobi            *jac = (PC_BJacobi*)pc->data;
1041   PetscErrorCode        ierr;
1042   PetscInt              i,n_local = jac->n_local;
1043   PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
1044   PetscScalar           *xin,*yin;
1045 
1046   PetscFunctionBegin;
1047   ierr = VecGetArray(x,&xin);CHKERRQ(ierr);
1048   ierr = VecGetArray(y,&yin);CHKERRQ(ierr);
1049   for (i=0; i<n_local; i++) {
1050     /*
1051        To avoid copying the subvector from x into a workspace we instead
1052        make the workspace vector array point to the subpart of the array of
1053        the global vector.
1054     */
1055     ierr = VecPlaceArray(bjac->x[i],xin+bjac->starts[i]);CHKERRQ(ierr);
1056     ierr = VecPlaceArray(bjac->y[i],yin+bjac->starts[i]);CHKERRQ(ierr);
1057 
    ierr = PetscLogEventBegin(PC_ApplyOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr);
    ierr = KSPSolve(jac->ksp[i],bjac->x[i],bjac->y[i]);CHKERRQ(ierr);
    ierr = PetscLogEventEnd(PC_ApplyOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr);
1061 
1062     ierr = VecResetArray(bjac->x[i]);CHKERRQ(ierr);
1063     ierr = VecResetArray(bjac->y[i]);CHKERRQ(ierr);
1064   }
1065   ierr = VecRestoreArray(x,&xin);CHKERRQ(ierr);
1066   ierr = VecRestoreArray(y,&yin);CHKERRQ(ierr);
1067   PetscFunctionReturn(0);
1068 }
1069 
1070 /*
      Apply the transpose of the block Jacobi preconditioner (multiple blocks per process)
1072 */
1073 #undef __FUNCT__
1074 #define __FUNCT__ "PCApplyTranspose_BJacobi_Multiblock"
1075 PetscErrorCode PCApplyTranspose_BJacobi_Multiblock(PC pc,Vec x,Vec y)
1076 {
1077   PC_BJacobi            *jac = (PC_BJacobi*)pc->data;
1078   PetscErrorCode        ierr;
1079   PetscInt              i,n_local = jac->n_local;
1080   PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
1081   PetscScalar           *xin,*yin;
1082 
1083   PetscFunctionBegin;
1084   ierr = VecGetArray(x,&xin);CHKERRQ(ierr);
1085   ierr = VecGetArray(y,&yin);CHKERRQ(ierr);
1086   for (i=0; i<n_local; i++) {
1087     /*
1088        To avoid copying the subvector from x into a workspace we instead
1089        make the workspace vector array point to the subpart of the array of
1090        the global vector.
1091     */
1092     ierr = VecPlaceArray(bjac->x[i],xin+bjac->starts[i]);CHKERRQ(ierr);
1093     ierr = VecPlaceArray(bjac->y[i],yin+bjac->starts[i]);CHKERRQ(ierr);
1094 
1095     ierr = PetscLogEventBegin(PC_ApplyTransposeOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr);
1096     ierr = KSPSolveTranspose(jac->ksp[i],bjac->x[i],bjac->y[i]);CHKERRQ(ierr);
    ierr = PetscLogEventEnd(PC_ApplyTransposeOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr);

    ierr = VecResetArray(bjac->x[i]);CHKERRQ(ierr);
    ierr = VecResetArray(bjac->y[i]);CHKERRQ(ierr);
  }
1099   ierr = VecRestoreArray(x,&xin);CHKERRQ(ierr);
1100   ierr = VecRestoreArray(y,&yin);CHKERRQ(ierr);
1101   PetscFunctionReturn(0);
1102 }
1103 
1104 #undef __FUNCT__
1105 #define __FUNCT__ "PCSetUp_BJacobi_Multiblock"
1106 static PetscErrorCode PCSetUp_BJacobi_Multiblock(PC pc,Mat mat,Mat pmat)
1107 {
1108   PC_BJacobi             *jac = (PC_BJacobi*)pc->data;
1109   PetscErrorCode         ierr;
1110   PetscInt               m,n_local,N,M,start,i;
1111   const char             *prefix,*pprefix,*mprefix;
1112   KSP                    ksp;
1113   Vec                    x,y;
1114   PC_BJacobi_Multiblock  *bjac = (PC_BJacobi_Multiblock*)jac->data;
1115   PC                     subpc;
1116   IS                     is;
1117   MatReuse               scall = MAT_REUSE_MATRIX;
1118 
1119   PetscFunctionBegin;
1120   ierr = MatGetLocalSize(pc->pmat,&M,&N);CHKERRQ(ierr);
1121 
1122   n_local = jac->n_local;
1123 
1124   if (jac->use_true_local) {
1125     PetscBool  same;
1126     ierr = PetscTypeCompare((PetscObject)mat,((PetscObject)pmat)->type_name,&same);CHKERRQ(ierr);
1127     if (!same) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_INCOMP,"Matrices not of same type");
1128   }
1129 
1130   if (!pc->setupcalled) {
1131     scall                  = MAT_INITIAL_MATRIX;
1132     pc->ops->reset         = PCReset_BJacobi_Multiblock;
1133     pc->ops->destroy       = PCDestroy_BJacobi_Multiblock;
1134     pc->ops->apply         = PCApply_BJacobi_Multiblock;
1135     pc->ops->applytranspose= PCApplyTranspose_BJacobi_Multiblock;
1136     pc->ops->setuponblocks = PCSetUpOnBlocks_BJacobi_Multiblock;
1137 
1138     ierr = PetscNewLog(pc,PC_BJacobi_Multiblock,&bjac);CHKERRQ(ierr);
1139     ierr = PetscMalloc(n_local*sizeof(KSP),&jac->ksp);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory(pc,n_local*sizeof(KSP));CHKERRQ(ierr);
1141     ierr = PetscMalloc2(n_local,Vec,&bjac->x,n_local,Vec,&bjac->y);CHKERRQ(ierr);
1142     ierr = PetscMalloc(n_local*sizeof(PetscScalar),&bjac->starts);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory(pc,n_local*sizeof(PetscScalar));CHKERRQ(ierr);
1144 
1145     jac->data    = (void*)bjac;
1146     ierr = PetscMalloc(n_local*sizeof(IS),&bjac->is);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory(pc,n_local*sizeof(IS));CHKERRQ(ierr);
1148 
1149     start = 0;
1150     for (i=0; i<n_local; i++) {
1151       ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
1152       ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
1153       ierr = PetscLogObjectParent(pc,ksp);CHKERRQ(ierr);
1154       ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
1155       ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
1156       ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
1157       ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
1158       ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
1159 
1160       m = jac->l_lens[i];
1161 
1162       /*
1163       The reason we need to generate these vectors is to serve
1164       as the right-hand side and solution vector for the solve on the
1165       block. We do not need to allocate space for the vectors since
1166       that is provided via VecPlaceArray() just before the call to
1167       KSPSolve() on the block.
1168 
1169       */
      ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,m,PETSC_NULL,&x);CHKERRQ(ierr);
1171       ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,m,PETSC_NULL,&y);CHKERRQ(ierr);
1172       ierr = PetscLogObjectParent(pc,x);CHKERRQ(ierr);
1173       ierr = PetscLogObjectParent(pc,y);CHKERRQ(ierr);
1174       bjac->x[i]      = x;
1175       bjac->y[i]      = y;
1176       bjac->starts[i] = start;
1177       jac->ksp[i]    = ksp;
1178 
1179       ierr = ISCreateStride(PETSC_COMM_SELF,m,start,1,&is);CHKERRQ(ierr);
1180       bjac->is[i] = is;
1181       ierr = PetscLogObjectParent(pc,is);CHKERRQ(ierr);
1182 
1183       start += m;
1184     }
1185   } else {
1186     bjac = (PC_BJacobi_Multiblock*)jac->data;
1187     /*
1188        Destroy the blocks from the previous iteration
1189     */
1190     if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
1191       ierr = MatDestroyMatrices(n_local,&bjac->pmat);CHKERRQ(ierr);
1192       if (jac->use_true_local) {
1193         ierr = MatDestroyMatrices(n_local,&bjac->mat);CHKERRQ(ierr);
1194       }
1195       scall = MAT_INITIAL_MATRIX;
1196     }
1197   }
1198 
1199   ierr = MatGetSubMatrices(pmat,n_local,bjac->is,bjac->is,scall,&bjac->pmat);CHKERRQ(ierr);
1200   if (jac->use_true_local) {
1201     ierr = PetscObjectGetOptionsPrefix((PetscObject)mat,&mprefix);CHKERRQ(ierr);
1202     ierr = MatGetSubMatrices(mat,n_local,bjac->is,bjac->is,scall,&bjac->mat);CHKERRQ(ierr);
1203   }
1204   /* Return control to the user so that the submatrices can be modified (e.g., to apply
1205      different boundary conditions for the submatrices than for the global problem) */
1206   ierr = PCModifySubMatrices(pc,n_local,bjac->is,bjac->is,bjac->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);
1207 
1208   ierr = PetscObjectGetOptionsPrefix((PetscObject)pmat,&pprefix);CHKERRQ(ierr);
1209   for (i=0; i<n_local; i++) {
1210     ierr = PetscLogObjectParent(pc,bjac->pmat[i]);CHKERRQ(ierr);
1211     ierr = PetscObjectSetOptionsPrefix((PetscObject)bjac->pmat[i],pprefix);CHKERRQ(ierr);
1212     if (jac->use_true_local) {
1213       ierr = PetscLogObjectParent(pc,bjac->mat[i]);CHKERRQ(ierr);
1214       ierr = PetscObjectSetOptionsPrefix((PetscObject)bjac->mat[i],mprefix);CHKERRQ(ierr);
1215       ierr = KSPSetOperators(jac->ksp[i],bjac->mat[i],bjac->pmat[i],pc->flag);CHKERRQ(ierr);
1216     } else {
1217       ierr = KSPSetOperators(jac->ksp[i],bjac->pmat[i],bjac->pmat[i],pc->flag);CHKERRQ(ierr);
1218     }
    if (pc->setfromoptionscalled) {
1220       ierr = KSPSetFromOptions(jac->ksp[i]);CHKERRQ(ierr);
1221     }
1222   }
1223   PetscFunctionReturn(0);
1224 }
1225 
1226 /* ---------------------------------------------------------------------------------------------*/
1227 /*
      These routines are for a single block shared by multiple processes
1229 */
1230 #undef __FUNCT__
1231 #define __FUNCT__ "PCReset_BJacobi_Multiproc"
1232 static PetscErrorCode PCReset_BJacobi_Multiproc(PC pc)
1233 {
1234   PC_BJacobi           *jac = (PC_BJacobi*)pc->data;
1235   PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
1236   PetscErrorCode       ierr;
1237 
1238   PetscFunctionBegin;
1239   if (mpjac->ysub){ierr = VecDestroy(mpjac->ysub);CHKERRQ(ierr);}
1240   if (mpjac->xsub){ierr = VecDestroy(mpjac->xsub);CHKERRQ(ierr);}
1241   if (mpjac->submats){ierr = MatDestroy(mpjac->submats);CHKERRQ(ierr);}
1242   if (mpjac->ksp){ierr = KSPReset(mpjac->ksp);CHKERRQ(ierr);}
1243   PetscFunctionReturn(0);
1244 }
1245 
1246 #undef __FUNCT__
1247 #define __FUNCT__ "PCDestroy_BJacobi_Multiproc"
1248 static PetscErrorCode PCDestroy_BJacobi_Multiproc(PC pc)
1249 {
1250   PC_BJacobi           *jac = (PC_BJacobi*)pc->data;
1251   PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
1252   PetscErrorCode       ierr;
1253 
1254   PetscFunctionBegin;
1255   ierr = PCReset_BJacobi_Multiproc(pc);CHKERRQ(ierr);
1256   if (mpjac->ksp){ierr = KSPDestroy(mpjac->ksp);CHKERRQ(ierr);}
1257   if (mpjac->psubcomm){ierr = PetscSubcommDestroy(mpjac->psubcomm);CHKERRQ(ierr);}
1258 
1259   ierr = PetscFree(mpjac);CHKERRQ(ierr);
1260   ierr = PetscFree(pc->data);CHKERRQ(ierr);
1261   PetscFunctionReturn(0);
1262 }
1263 
1264 #undef __FUNCT__
1265 #define __FUNCT__ "PCApply_BJacobi_Multiproc"
1266 static PetscErrorCode PCApply_BJacobi_Multiproc(PC pc,Vec x,Vec y)
1267 {
1268   PC_BJacobi           *jac = (PC_BJacobi*)pc->data;
1269   PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
1270   PetscErrorCode       ierr;
1271   PetscScalar          *xarray,*yarray;
1272 
1273   PetscFunctionBegin;
1274   /* place x's and y's local arrays into xsub and ysub */
1275   ierr = VecGetArray(x,&xarray);CHKERRQ(ierr);
1276   ierr = VecGetArray(y,&yarray);CHKERRQ(ierr);
1277   ierr = VecPlaceArray(mpjac->xsub,xarray);CHKERRQ(ierr);
1278   ierr = VecPlaceArray(mpjac->ysub,yarray);CHKERRQ(ierr);
1279 
1280   /* apply preconditioner on each matrix block */
1281   ierr = KSPSolve(mpjac->ksp,mpjac->xsub,mpjac->ysub);CHKERRQ(ierr);
1282 
1283   ierr = VecResetArray(mpjac->xsub);CHKERRQ(ierr);
1284   ierr = VecResetArray(mpjac->ysub);CHKERRQ(ierr);
1285   ierr = VecRestoreArray(x,&xarray);CHKERRQ(ierr);
1286   ierr = VecRestoreArray(y,&yarray);CHKERRQ(ierr);
1287   PetscFunctionReturn(0);
1288 }
1289 
1290 extern PetscErrorCode MatGetMultiProcBlock_MPIAIJ(Mat,MPI_Comm,Mat*);
1291 #undef __FUNCT__
1292 #define __FUNCT__ "PCSetUp_BJacobi_Multiproc"
1293 static PetscErrorCode PCSetUp_BJacobi_Multiproc(PC pc)
1294 {
1295   PC_BJacobi           *jac = (PC_BJacobi*)pc->data;
1296   PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
1297   PetscErrorCode       ierr;
1298   PetscInt             m,n;
1299   MPI_Comm             comm = ((PetscObject)pc)->comm,subcomm=0;
1300   const char           *prefix;
1301 
1302   PetscFunctionBegin;
1303   if (jac->n_local > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Only a single block in a subcommunicator is supported");
1304   jac->n_local = 1; /* currently only a single block is supported for a subcommunicator */
1305   if (!pc->setupcalled) {
1306     ierr = PetscNewLog(pc,PC_BJacobi_Multiproc,&mpjac);CHKERRQ(ierr);
1307     jac->data = (void*)mpjac;
1308 
1309     /* initialize datastructure mpjac */
1310     if (!jac->psubcomm) {
      /* Create default contiguous subcommunicators if the user does not provide them */
1312       ierr = PetscSubcommCreate(comm,&jac->psubcomm);CHKERRQ(ierr);
1313       ierr = PetscSubcommSetNumber(jac->psubcomm,jac->n);CHKERRQ(ierr);
1314       ierr = PetscSubcommSetType(jac->psubcomm,PETSC_SUBCOMM_CONTIGUOUS);CHKERRQ(ierr);
1315       ierr = PetscLogObjectMemory(pc,sizeof(PetscSubcomm));CHKERRQ(ierr);
1316     }
1317     mpjac->psubcomm = jac->psubcomm;
1318     subcomm         = mpjac->psubcomm->comm;
1319 
1320     /* Get matrix blocks of pmat */
1321     ierr = MatGetMultiProcBlock_MPIAIJ(pc->pmat,subcomm,&mpjac->submats);CHKERRQ(ierr);
1322 
    /* create a new KSP/PC that the processes in each subcomm share a copy of */
1324     ierr = KSPCreate(subcomm,&mpjac->ksp);CHKERRQ(ierr);
1325     ierr = PetscObjectIncrementTabLevel((PetscObject)mpjac->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
1326     ierr = PetscLogObjectParent(pc,mpjac->ksp);CHKERRQ(ierr);
1327     ierr = KSPSetOperators(mpjac->ksp,mpjac->submats,mpjac->submats,pc->flag);CHKERRQ(ierr);
1328     ierr = KSPGetPC(mpjac->ksp,&mpjac->pc);CHKERRQ(ierr);
1329 
1330     ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
1331     ierr = KSPSetOptionsPrefix(mpjac->ksp,prefix);CHKERRQ(ierr);
1332     ierr = KSPAppendOptionsPrefix(mpjac->ksp,"sub_");CHKERRQ(ierr);
1333     /*
1334       PetscMPIInt rank,subsize,subrank;
1335       ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
1336       ierr = MPI_Comm_size(subcomm,&subsize);CHKERRQ(ierr);
1337       ierr = MPI_Comm_rank(subcomm,&subrank);CHKERRQ(ierr);
1338 
1339       ierr = MatGetLocalSize(mpjac->submats,&m,PETSC_NULL);CHKERRQ(ierr);
1340       ierr = MatGetSize(mpjac->submats,&n,PETSC_NULL);CHKERRQ(ierr);
1341       ierr = PetscSynchronizedPrintf(comm,"[%d], sub-size %d,sub-rank %d\n",rank,subsize,subrank);
1342       ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
1343     */
1344 
1345     /* create dummy vectors xsub and ysub */
1346     ierr = MatGetLocalSize(mpjac->submats,&m,&n);CHKERRQ(ierr);
1347     ierr = VecCreateMPIWithArray(subcomm,n,PETSC_DECIDE,PETSC_NULL,&mpjac->xsub);CHKERRQ(ierr);
1348     ierr = VecCreateMPIWithArray(subcomm,m,PETSC_DECIDE,PETSC_NULL,&mpjac->ysub);CHKERRQ(ierr);
1349     ierr = PetscLogObjectParent(pc,mpjac->xsub);CHKERRQ(ierr);
1350     ierr = PetscLogObjectParent(pc,mpjac->ysub);CHKERRQ(ierr);
1351 
1352     pc->ops->reset   = PCReset_BJacobi_Multiproc;
1353     pc->ops->destroy = PCDestroy_BJacobi_Multiproc;
1354     pc->ops->apply   = PCApply_BJacobi_Multiproc;
1355   }
1356 
1357   if (pc->setupcalled && pc->flag == DIFFERENT_NONZERO_PATTERN) {
1358     /* destroy old matrix blocks, then get new matrix blocks */
1359     if (mpjac->submats) {
1360       ierr = MatDestroy(mpjac->submats);CHKERRQ(ierr);
1361       subcomm = mpjac->psubcomm->comm;
1362       ierr = MatGetMultiProcBlock_MPIAIJ(pc->pmat,subcomm,&mpjac->submats);CHKERRQ(ierr);
1363       ierr = KSPSetOperators(mpjac->ksp,mpjac->submats,mpjac->submats,DIFFERENT_NONZERO_PATTERN);CHKERRQ(ierr);
1364     }
1365   }
1366 
1367   if (pc->setfromoptionscalled){
1368     ierr = KSPSetFromOptions(mpjac->ksp);CHKERRQ(ierr);
1369   }
1370   PetscFunctionReturn(0);
1371 }
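/*
   Usage sketch for the multiproc path set up above (an illustration; ./app stands for any
   PETSc program that calls KSPSetFromOptions()): on 6 processes

     mpiexec -n 6 ./app -pc_type bjacobi -pc_bjacobi_blocks 2 -sub_ksp_type gmres -sub_pc_type asm

   creates two blocks, each owned by a contiguous 3-process subcommunicator, and solves each
   block with GMRES/ASM on that subcommunicator (the inner solver takes the "sub_" prefix).
*/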
1372