1 
2 /*
3    Defines a block Jacobi preconditioner.
4 */
5 #include <petsc-private/pcimpl.h>              /*I "petscpc.h" I*/
6 #include <../src/ksp/pc/impls/bjacobi/bjacobi.h>
7 
8 static PetscErrorCode PCSetUp_BJacobi_Singleblock(PC,Mat,Mat);
9 static PetscErrorCode PCSetUp_BJacobi_Multiblock(PC,Mat,Mat);
10 static PetscErrorCode PCSetUp_BJacobi_Multiproc(PC);
11 
12 #undef __FUNCT__
13 #define __FUNCT__ "PCSetUp_BJacobi"
14 static PetscErrorCode PCSetUp_BJacobi(PC pc)
15 {
16   PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
17   Mat            mat = pc->mat,pmat = pc->pmat;
18   PetscErrorCode ierr,(*f)(Mat,Mat*);
19   PetscInt       N,M,start,i,sum,end;
20   PetscInt       bs,i_start=-1,i_end=-1;
21   PetscMPIInt    rank,size;
22   const char     *pprefix,*mprefix;
23 
24   PetscFunctionBegin;
25   ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
26   ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
27   ierr = MatGetLocalSize(pc->pmat,&M,&N);CHKERRQ(ierr);
28   ierr = MatGetBlockSize(pc->pmat,&bs);CHKERRQ(ierr);
29 
30   if (jac->n > 0 && jac->n < size){
31     ierr = PCSetUp_BJacobi_Multiproc(pc);CHKERRQ(ierr);
32     PetscFunctionReturn(0);
33   }
34 
35   /* --------------------------------------------------------------------------
36       Determines the number of blocks assigned to each processor
37   -----------------------------------------------------------------------------*/
38 
  /* local block count given */
40   if (jac->n_local > 0 && jac->n < 0) {
41     ierr = MPI_Allreduce(&jac->n_local,&jac->n,1,MPIU_INT,MPI_SUM,((PetscObject)pc)->comm);CHKERRQ(ierr);
42     if (jac->l_lens) { /* check that user set these correctly */
43       sum = 0;
44       for (i=0; i<jac->n_local; i++) {
45         if (jac->l_lens[i]/bs*bs !=jac->l_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat blocksize doesn't match block Jacobi layout");
46         sum += jac->l_lens[i];
47       }
48       if (sum != M) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local lens set incorrectly");
49     } else {
50       ierr = PetscMalloc(jac->n_local*sizeof(PetscInt),&jac->l_lens);CHKERRQ(ierr);
51       for (i=0; i<jac->n_local; i++) {
52         jac->l_lens[i] = bs*((M/bs)/jac->n_local + (((M/bs) % jac->n_local) > i));
53       }
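      /*
         A worked instance of the formula above (illustrative only): with
         M = 10 local rows, bs = 2, and jac->n_local = 3 there are M/bs = 5
         block rows, so the loop produces l_lens = {4,4,2}; the remainder
         (M/bs) % jac->n_local = 2 gives the first two blocks one extra
         block row each, and the lengths sum back to M.
      */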
54     }
55   } else if (jac->n > 0 && jac->n_local < 0) { /* global block count given */
56     /* global blocks given: determine which ones are local */
57     if (jac->g_lens) {
      /* check that g_lens has valid entries */
59       for (i=0; i<jac->n; i++) {
60         if (!jac->g_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Zero block not allowed");
61         if (jac->g_lens[i]/bs*bs != jac->g_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat blocksize doesn't match block Jacobi layout");
62       }
63       if (size == 1) {
64         jac->n_local = jac->n;
65         ierr         = PetscMalloc(jac->n_local*sizeof(PetscInt),&jac->l_lens);CHKERRQ(ierr);
66         ierr         = PetscMemcpy(jac->l_lens,jac->g_lens,jac->n_local*sizeof(PetscInt));CHKERRQ(ierr);
67         /* check that user set these correctly */
68         sum = 0;
69         for (i=0; i<jac->n_local; i++) sum += jac->l_lens[i];
70         if (sum != M) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Global lens set incorrectly");
71       } else {
72         ierr = MatGetOwnershipRange(pc->pmat,&start,&end);CHKERRQ(ierr);
        /* loop over blocks determining the first one owned by me */
74         sum = 0;
75         for (i=0; i<jac->n+1; i++) {
76           if (sum == start) { i_start = i; goto start_1;}
77           if (i < jac->n) sum += jac->g_lens[i];
78         }
79         SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Block sizes used in PCBJacobiSetTotalBlocks()\nare not compatible with parallel matrix layout");
80  start_1:
81         for (i=i_start; i<jac->n+1; i++) {
82           if (sum == end) { i_end = i; goto end_1; }
83           if (i < jac->n) sum += jac->g_lens[i];
84         }
85         SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Block sizes used in PCBJacobiSetTotalBlocks()\nare not compatible with parallel matrix layout");
86  end_1:
87         jac->n_local = i_end - i_start;
88         ierr         = PetscMalloc(jac->n_local*sizeof(PetscInt),&jac->l_lens);CHKERRQ(ierr);
89         ierr         = PetscMemcpy(jac->l_lens,jac->g_lens+i_start,jac->n_local*sizeof(PetscInt));CHKERRQ(ierr);
90       }
    } else { /* no global blocks given, determine them using the default layout */
92       jac->n_local = jac->n/size + ((jac->n % size) > rank);
93       ierr         = PetscMalloc(jac->n_local*sizeof(PetscInt),&jac->l_lens);CHKERRQ(ierr);
94       for (i=0; i<jac->n_local; i++) {
95         jac->l_lens[i] = ((M/bs)/jac->n_local + (((M/bs) % jac->n_local) > i))*bs;
96         if (!jac->l_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Too many blocks given");
97       }
98     }
99   } else if (jac->n < 0 && jac->n_local < 0) { /* no blocks given */
100     jac->n         = size;
101     jac->n_local   = 1;
102     ierr           = PetscMalloc(sizeof(PetscInt),&jac->l_lens);CHKERRQ(ierr);
103     jac->l_lens[0] = M;
104   } else { /* jac->n > 0 && jac->n_local > 0 */
105     if (!jac->l_lens) {
106       ierr = PetscMalloc(jac->n_local*sizeof(PetscInt),&jac->l_lens);CHKERRQ(ierr);
107       for (i=0; i<jac->n_local; i++) {
108         jac->l_lens[i] = bs*((M/bs)/jac->n_local + (((M/bs) % jac->n_local) > i));
109       }
110     }
111   }
112   if (jac->n_local < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Number of blocks is less than number of processors");
113 
114   /* -------------------------
115       Determines mat and pmat
116   ---------------------------*/
117   ierr = PetscObjectQueryFunction((PetscObject)pc->mat,"MatGetDiagonalBlock_C",(void (**)(void))&f);CHKERRQ(ierr);
118   if (!f && size == 1) {
119     mat  = pc->mat;
120     pmat = pc->pmat;
121   } else {
122     if (jac->use_true_local) {
123       /* use block from true matrix, not preconditioner matrix for local MatMult() */
124       ierr = MatGetDiagonalBlock(pc->mat,&mat);CHKERRQ(ierr);
125       /* make submatrix have same prefix as entire matrix */
126       ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->mat,&mprefix);CHKERRQ(ierr);
127       ierr = PetscObjectSetOptionsPrefix((PetscObject)mat,mprefix);CHKERRQ(ierr);
128     }
129     if (pc->pmat != pc->mat || !jac->use_true_local) {
130       ierr = MatGetDiagonalBlock(pc->pmat,&pmat);CHKERRQ(ierr);
131       /* make submatrix have same prefix as entire matrix */
132       ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
133       ierr = PetscObjectSetOptionsPrefix((PetscObject)pmat,pprefix);CHKERRQ(ierr);
134     } else {
135       pmat = mat;
136     }
137   }
138 
  /* ------------------------------------------
      Setup code depends on the number of blocks
  --------------------------------------------*/
142   if (jac->n_local == 1) {
143     ierr = PCSetUp_BJacobi_Singleblock(pc,mat,pmat);CHKERRQ(ierr);
144   } else {
145     ierr = PCSetUp_BJacobi_Multiblock(pc,mat,pmat);CHKERRQ(ierr);
146   }
147   PetscFunctionReturn(0);
148 }
149 
/* Default destroy, if it has never been set up */
151 #undef __FUNCT__
152 #define __FUNCT__ "PCDestroy_BJacobi"
153 static PetscErrorCode PCDestroy_BJacobi(PC pc)
154 {
155   PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
156   PetscErrorCode ierr;
157 
158   PetscFunctionBegin;
159   ierr = PetscFree(jac->g_lens);CHKERRQ(ierr);
160   ierr = PetscFree(jac->l_lens);CHKERRQ(ierr);
161   ierr = PetscFree(pc->data);CHKERRQ(ierr);
162   PetscFunctionReturn(0);
163 }
164 
165 #undef __FUNCT__
166 #define __FUNCT__ "PCSetFromOptions_BJacobi"
168 static PetscErrorCode PCSetFromOptions_BJacobi(PC pc)
169 {
170   PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
171   PetscErrorCode ierr;
172   PetscInt       blocks;
173   PetscBool      flg;
174 
175   PetscFunctionBegin;
176   ierr = PetscOptionsHead("Block Jacobi options");CHKERRQ(ierr);
177     ierr = PetscOptionsInt("-pc_bjacobi_blocks","Total number of blocks","PCBJacobiSetTotalBlocks",jac->n,&blocks,&flg);CHKERRQ(ierr);
178     if (flg) {
179       ierr = PCBJacobiSetTotalBlocks(pc,blocks,PETSC_NULL);CHKERRQ(ierr);
180     }
181     flg  = PETSC_FALSE;
182     ierr = PetscOptionsBool("-pc_bjacobi_truelocal","Use the true matrix, not preconditioner matrix to define matrix vector product in sub-problems","PCBJacobiSetUseTrueLocal",flg,&flg,PETSC_NULL);CHKERRQ(ierr);
183     if (flg) {
184       ierr = PCBJacobiSetUseTrueLocal(pc);CHKERRQ(ierr);
185     }
186   ierr = PetscOptionsTail();CHKERRQ(ierr);
187   PetscFunctionReturn(0);
188 }
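
/*
   Example (a sketch, not part of this file's API surface): how the two
   options handled above typically appear on the command line of an
   application that calls KSPSetFromOptions()/PCSetFromOptions():

     -pc_type bjacobi -pc_bjacobi_blocks 8 -pc_bjacobi_truelocal
*/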
189 
190 #undef __FUNCT__
191 #define __FUNCT__ "PCView_BJacobi"
192 static PetscErrorCode PCView_BJacobi(PC pc,PetscViewer viewer)
193 {
194   PC_BJacobi           *jac = (PC_BJacobi*)pc->data;
195   PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
196   PetscErrorCode       ierr;
197   PetscMPIInt          rank;
198   PetscInt             i;
199   PetscBool            iascii,isstring;
200   PetscViewer          sviewer;
201 
202   PetscFunctionBegin;
203   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
204   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
205   if (iascii) {
206     if (jac->use_true_local) {
207       ierr = PetscViewerASCIIPrintf(viewer,"  block Jacobi: using true local matrix, number of blocks = %D\n",jac->n);CHKERRQ(ierr);
208     }
209     ierr = PetscViewerASCIIPrintf(viewer,"  block Jacobi: number of blocks = %D\n",jac->n);CHKERRQ(ierr);
210     ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
211     if (jac->same_local_solves) {
212       ierr = PetscViewerASCIIPrintf(viewer,"  Local solve is same for all blocks, in the following KSP and PC objects:\n");CHKERRQ(ierr);
213       if (jac->ksp && !jac->psubcomm) {
214         ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
215         if (!rank){
216           ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
217           ierr = KSPView(jac->ksp[0],sviewer);CHKERRQ(ierr);
218           ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
219         }
220         ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
221       } else if (jac->psubcomm && !jac->psubcomm->color){
222         ierr = PetscViewerASCIIGetStdout(mpjac->psubcomm->comm,&sviewer);CHKERRQ(ierr);
223         ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
224         ierr = KSPView(*(jac->ksp),sviewer);CHKERRQ(ierr);
225         ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
226       }
227     } else {
228       PetscInt n_global;
229       ierr = MPI_Allreduce(&jac->n_local,&n_global,1,MPIU_INT,MPI_MAX,((PetscObject)pc)->comm);CHKERRQ(ierr);
230       ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
231       ierr = PetscViewerASCIIPrintf(viewer,"  Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr);
232       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] number of local blocks = %D, first local block number = %D\n",
233                    rank,jac->n_local,jac->first_local);CHKERRQ(ierr);
234       ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
235       for (i=0; i<n_global; i++) {
236         ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
237         if (i < jac->n_local) {
238           ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] local block number %D\n",rank,i);CHKERRQ(ierr);
239           ierr = KSPView(jac->ksp[i],sviewer);CHKERRQ(ierr);
240           ierr = PetscViewerASCIISynchronizedPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
241         }
242         ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
243       }
244       ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
245       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
246       ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
247     }
248   } else if (isstring) {
249     ierr = PetscViewerStringSPrintf(viewer," blks=%D",jac->n);CHKERRQ(ierr);
250     ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
251     if (jac->ksp) {ierr = KSPView(jac->ksp[0],sviewer);CHKERRQ(ierr);}
252     ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
253   } else {
254     SETERRQ1(((PetscObject)pc)->comm,PETSC_ERR_SUP,"Viewer type %s not supported for block Jacobi",((PetscObject)viewer)->type_name);
255   }
256   PetscFunctionReturn(0);
257 }
258 
259 /* -------------------------------------------------------------------------------------*/
260 
261 EXTERN_C_BEGIN
262 #undef __FUNCT__
263 #define __FUNCT__ "PCBJacobiSetUseTrueLocal_BJacobi"
264 PetscErrorCode  PCBJacobiSetUseTrueLocal_BJacobi(PC pc)
265 {
266   PC_BJacobi   *jac;
267 
268   PetscFunctionBegin;
269   jac                 = (PC_BJacobi*)pc->data;
270   jac->use_true_local = PETSC_TRUE;
271   PetscFunctionReturn(0);
272 }
273 EXTERN_C_END
274 
275 EXTERN_C_BEGIN
276 #undef __FUNCT__
277 #define __FUNCT__ "PCBJacobiGetSubKSP_BJacobi"
278 PetscErrorCode  PCBJacobiGetSubKSP_BJacobi(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
279 {
  PC_BJacobi   *jac = (PC_BJacobi*)pc->data;
281 
282   PetscFunctionBegin;
283   if (!pc->setupcalled) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Must call KSPSetUp() or PCSetUp() first");
284 
285   if (n_local)     *n_local     = jac->n_local;
286   if (first_local) *first_local = jac->first_local;
287   *ksp                          = jac->ksp;
288   jac->same_local_solves        = PETSC_FALSE; /* Assume that local solves are now different;
289                                                   not necessarily true though!  This flag is
290                                                   used only for PCView_BJacobi() */
291   PetscFunctionReturn(0);
292 }
293 EXTERN_C_END
294 
295 EXTERN_C_BEGIN
296 #undef __FUNCT__
297 #define __FUNCT__ "PCBJacobiSetTotalBlocks_BJacobi"
298 PetscErrorCode  PCBJacobiSetTotalBlocks_BJacobi(PC pc,PetscInt blocks,PetscInt *lens)
299 {
300   PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
301   PetscErrorCode ierr;
302 
303   PetscFunctionBegin;
304 
305   if (pc->setupcalled > 0 && jac->n!=blocks) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ORDER,"Cannot alter number of blocks after PCSetUp()/KSPSetUp() has been called");
306   jac->n = blocks;
307   if (!lens) {
308     jac->g_lens = 0;
309   } else {
310     ierr = PetscMalloc(blocks*sizeof(PetscInt),&jac->g_lens);CHKERRQ(ierr);
311     ierr = PetscLogObjectMemory(pc,blocks*sizeof(PetscInt));CHKERRQ(ierr);
312     ierr = PetscMemcpy(jac->g_lens,lens,blocks*sizeof(PetscInt));CHKERRQ(ierr);
313   }
314   PetscFunctionReturn(0);
315 }
316 EXTERN_C_END
317 
318 EXTERN_C_BEGIN
319 #undef __FUNCT__
320 #define __FUNCT__ "PCBJacobiGetTotalBlocks_BJacobi"
321 PetscErrorCode  PCBJacobiGetTotalBlocks_BJacobi(PC pc, PetscInt *blocks, const PetscInt *lens[])
322 {
323   PC_BJacobi *jac = (PC_BJacobi*) pc->data;
324 
325   PetscFunctionBegin;
326   *blocks = jac->n;
327   if (lens) *lens = jac->g_lens;
328   PetscFunctionReturn(0);
329 }
330 EXTERN_C_END
331 
332 EXTERN_C_BEGIN
333 #undef __FUNCT__
334 #define __FUNCT__ "PCBJacobiSetLocalBlocks_BJacobi"
335 PetscErrorCode  PCBJacobiSetLocalBlocks_BJacobi(PC pc,PetscInt blocks,const PetscInt lens[])
336 {
337   PC_BJacobi     *jac;
338   PetscErrorCode ierr;
339 
340   PetscFunctionBegin;
341   jac = (PC_BJacobi*)pc->data;
342 
343   jac->n_local = blocks;
344   if (!lens) {
345     jac->l_lens = 0;
346   } else {
347     ierr = PetscMalloc(blocks*sizeof(PetscInt),&jac->l_lens);CHKERRQ(ierr);
348     ierr = PetscLogObjectMemory(pc,blocks*sizeof(PetscInt));CHKERRQ(ierr);
349     ierr = PetscMemcpy(jac->l_lens,lens,blocks*sizeof(PetscInt));CHKERRQ(ierr);
350   }
351   PetscFunctionReturn(0);
352 }
353 EXTERN_C_END
354 
355 EXTERN_C_BEGIN
356 #undef __FUNCT__
357 #define __FUNCT__ "PCBJacobiGetLocalBlocks_BJacobi"
358 PetscErrorCode  PCBJacobiGetLocalBlocks_BJacobi(PC pc, PetscInt *blocks, const PetscInt *lens[])
359 {
360   PC_BJacobi *jac = (PC_BJacobi*) pc->data;
361 
362   PetscFunctionBegin;
363   *blocks = jac->n_local;
364   if (lens) *lens = jac->l_lens;
365   PetscFunctionReturn(0);
366 }
367 EXTERN_C_END
368 
369 /* -------------------------------------------------------------------------------------*/
370 
371 #undef __FUNCT__
372 #define __FUNCT__ "PCBJacobiSetUseTrueLocal"
373 /*@
374    PCBJacobiSetUseTrueLocal - Sets a flag to indicate that the block
375    problem is associated with the linear system matrix instead of the
376    default (where it is associated with the preconditioning matrix).
377    That is, if the local system is solved iteratively then it iterates
378    on the block from the matrix using the block from the preconditioner
379    as the preconditioner for the local block.
380 
381    Logically Collective on PC
382 
383    Input Parameters:
384 .  pc - the preconditioner context
385 
386    Options Database Key:
387 .  -pc_bjacobi_truelocal - Activates PCBJacobiSetUseTrueLocal()
388 
389    Notes:
390    For the common case in which the preconditioning and linear
391    system matrices are identical, this routine is unnecessary.
392 
393    Level: intermediate
394 
395 .keywords:  block, Jacobi, set, true, local, flag
396 
397 .seealso: PCSetOperators(), PCBJacobiSetLocalBlocks()
398 @*/
399 PetscErrorCode  PCBJacobiSetUseTrueLocal(PC pc)
400 {
401   PetscErrorCode ierr;
402 
403   PetscFunctionBegin;
404   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
405   ierr = PetscTryMethod(pc,"PCBJacobiSetUseTrueLocal_C",(PC),(pc));CHKERRQ(ierr);
406   PetscFunctionReturn(0);
407 }
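
/*
   Example (a sketch of typical usage): iterate on blocks of the true
   operator while preconditioning with blocks of the preconditioner matrix;
   assumes the two matrices were given to the solver with KSPSetOperators()
   and that pc is that solver's preconditioner context:

     ierr = PCSetType(pc,PCBJACOBI);CHKERRQ(ierr);
     ierr = PCBJacobiSetUseTrueLocal(pc);CHKERRQ(ierr);
*/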
408 
409 #undef __FUNCT__
410 #define __FUNCT__ "PCBJacobiGetSubKSP"
411 /*@C
412    PCBJacobiGetSubKSP - Gets the local KSP contexts for all blocks on
413    this processor.
414 
   Not Collective
416 
417    Input Parameter:
418 .  pc - the preconditioner context
419 
420    Output Parameters:
+  n_local - the number of blocks on this processor (pass PETSC_NULL if not needed)
.  first_local - the global number of the first block on this processor (pass PETSC_NULL if not needed)
423 -  ksp - the array of KSP contexts
424 
425    Notes:
   The array of KSP contexts returned by PCBJacobiGetSubKSP() must not be freed by the caller.
427 
428    Currently for some matrix implementations only 1 block per processor
429    is supported.
430 
431    You must call KSPSetUp() or PCSetUp() before calling PCBJacobiGetSubKSP().
432 
433    Level: advanced
434 
435 .keywords:  block, Jacobi, get, sub, KSP, context
436 
.seealso: PCASMGetSubKSP()
438 @*/
439 PetscErrorCode  PCBJacobiGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
440 {
441   PetscErrorCode ierr;
442 
443   PetscFunctionBegin;
444   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
445   ierr = PetscUseMethod(pc,"PCBJacobiGetSubKSP_C",(PC,PetscInt *,PetscInt *,KSP **),(pc,n_local,first_local,ksp));CHKERRQ(ierr);
446   PetscFunctionReturn(0);
447 }
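
/*
   Example (a sketch of typical usage, assuming pc has type PCBJACOBI and
   KSPSetUp() has already been called on the outer solver): retrieve the
   local sub-KSPs and configure each block's solver directly:

     KSP      *subksp;
     PC        subpc;
     PetscInt  nlocal,first,i;

     ierr = PCBJacobiGetSubKSP(pc,&nlocal,&first,&subksp);CHKERRQ(ierr);
     for (i=0; i<nlocal; i++) {
       ierr = KSPGetPC(subksp[i],&subpc);CHKERRQ(ierr);
       ierr = PCSetType(subpc,PCILU);CHKERRQ(ierr);
       ierr = KSPSetTolerances(subksp[i],1.e-6,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT);CHKERRQ(ierr);
     }
*/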
448 
449 #undef __FUNCT__
450 #define __FUNCT__ "PCBJacobiSetTotalBlocks"
451 /*@
452    PCBJacobiSetTotalBlocks - Sets the global number of blocks for the block
453    Jacobi preconditioner.
454 
455    Collective on PC
456 
457    Input Parameters:
458 +  pc - the preconditioner context
459 .  blocks - the number of blocks
460 -  lens - [optional] integer array containing the size of each block
461 
462    Options Database Key:
463 .  -pc_bjacobi_blocks <blocks> - Sets the number of global blocks
464 
465    Notes:
466    Currently only a limited number of blocking configurations are supported.
467    All processors sharing the PC must call this routine with the same data.
468 
469    Level: intermediate
470 
471 .keywords:  set, number, Jacobi, global, total, blocks
472 
473 .seealso: PCBJacobiSetUseTrueLocal(), PCBJacobiSetLocalBlocks()
474 @*/
475 PetscErrorCode  PCBJacobiSetTotalBlocks(PC pc,PetscInt blocks,const PetscInt lens[])
476 {
477   PetscErrorCode ierr;
478 
479   PetscFunctionBegin;
480   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
481   if (blocks <= 0) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have positive blocks");
482   ierr = PetscTryMethod(pc,"PCBJacobiSetTotalBlocks_C",(PC,PetscInt,const PetscInt[]),(pc,blocks,lens));CHKERRQ(ierr);
483   PetscFunctionReturn(0);
484 }
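
/*
   Example (a sketch): request four global blocks with explicit sizes for a
   100-row system; every process sharing the PC must pass identical values:

     PetscInt lens[4] = {40,30,20,10};
     ierr = PCBJacobiSetTotalBlocks(pc,4,lens);CHKERRQ(ierr);
*/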
485 
486 #undef __FUNCT__
487 #define __FUNCT__ "PCBJacobiGetTotalBlocks"
488 /*@C
489    PCBJacobiGetTotalBlocks - Gets the global number of blocks for the block
490    Jacobi preconditioner.
491 
492    Not Collective
493 
494    Input Parameter:
495 .  pc - the preconditioner context
496 
   Output Parameters:
498 +  blocks - the number of blocks
499 -  lens - integer array containing the size of each block
500 
501    Level: intermediate
502 
503 .keywords:  get, number, Jacobi, global, total, blocks
504 
505 .seealso: PCBJacobiSetUseTrueLocal(), PCBJacobiGetLocalBlocks()
506 @*/
507 PetscErrorCode  PCBJacobiGetTotalBlocks(PC pc, PetscInt *blocks, const PetscInt *lens[])
508 {
509   PetscErrorCode ierr;
510 
511   PetscFunctionBegin;
512   PetscValidHeaderSpecific(pc, PC_CLASSID,1);
513   PetscValidIntPointer(blocks,2);
514   ierr = PetscUseMethod(pc,"PCBJacobiGetTotalBlocks_C",(PC,PetscInt*, const PetscInt *[]),(pc,blocks,lens));CHKERRQ(ierr);
515   PetscFunctionReturn(0);
516 }
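
/*
   Example (a sketch): query the global decomposition; lens points into the
   PC's internal array (or is PETSC_NULL if no sizes were set) and must not
   be freed by the caller:

     PetscInt        nblocks;
     const PetscInt *lens;
     ierr = PCBJacobiGetTotalBlocks(pc,&nblocks,&lens);CHKERRQ(ierr);
*/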
517 
518 #undef __FUNCT__
519 #define __FUNCT__ "PCBJacobiSetLocalBlocks"
520 /*@
521    PCBJacobiSetLocalBlocks - Sets the local number of blocks for the block
522    Jacobi preconditioner.
523 
524    Not Collective
525 
526    Input Parameters:
527 +  pc - the preconditioner context
528 .  blocks - the number of blocks
529 -  lens - [optional] integer array containing size of each block
530 
531    Note:
532    Currently only a limited number of blocking configurations are supported.
533 
534    Level: intermediate
535 
536 .keywords: PC, set, number, Jacobi, local, blocks
537 
538 .seealso: PCBJacobiSetUseTrueLocal(), PCBJacobiSetTotalBlocks()
539 @*/
540 PetscErrorCode  PCBJacobiSetLocalBlocks(PC pc,PetscInt blocks,const PetscInt lens[])
541 {
542   PetscErrorCode ierr;
543 
544   PetscFunctionBegin;
545   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (blocks < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must have nonnegative blocks");
547   ierr = PetscTryMethod(pc,"PCBJacobiSetLocalBlocks_C",(PC,PetscInt,const PetscInt []),(pc,blocks,lens));CHKERRQ(ierr);
548   PetscFunctionReturn(0);
549 }
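
/*
   Example (a sketch): two blocks on this process with caller-chosen sizes;
   since the routine is not collective, different processes may pass
   different values:

     PetscInt lens[2] = {30,20};
     ierr = PCBJacobiSetLocalBlocks(pc,2,lens);CHKERRQ(ierr);
*/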
550 
551 #undef __FUNCT__
552 #define __FUNCT__ "PCBJacobiGetLocalBlocks"
553 /*@C
554    PCBJacobiGetLocalBlocks - Gets the local number of blocks for the block
555    Jacobi preconditioner.
556 
557    Not Collective
558 
   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  blocks - the number of blocks
-  lens - [optional] integer array containing the size of each block
563 
564    Note:
565    Currently only a limited number of blocking configurations are supported.
566 
567    Level: intermediate
568 
569 .keywords: PC, get, number, Jacobi, local, blocks
570 
571 .seealso: PCBJacobiSetUseTrueLocal(), PCBJacobiGetTotalBlocks()
572 @*/
573 PetscErrorCode  PCBJacobiGetLocalBlocks(PC pc, PetscInt *blocks, const PetscInt *lens[])
574 {
575   PetscErrorCode ierr;
576 
577   PetscFunctionBegin;
578   PetscValidHeaderSpecific(pc, PC_CLASSID,1);
579   PetscValidIntPointer(blocks,2);
580   ierr = PetscUseMethod(pc,"PCBJacobiGetLocalBlocks_C",(PC,PetscInt*, const PetscInt *[]),(pc,blocks,lens));CHKERRQ(ierr);
581   PetscFunctionReturn(0);
582 }
583 
584 /* -----------------------------------------------------------------------------------*/
585 
586 /*MC
587    PCBJACOBI - Use block Jacobi preconditioning, each block is (approximately) solved with
588            its own KSP object.
589 
   Options Database Keys:
+  -pc_bjacobi_blocks <n> - Sets the global number of blocks, as in PCBJacobiSetTotalBlocks()
-  -pc_bjacobi_truelocal - Activates PCBJacobiSetUseTrueLocal()
592 
   Notes: Each processor can have one or more blocks, but a block cannot be shared by more
     than one processor. Defaults to one block per processor.

     To set options on the solvers for all blocks, append -sub_ to all the KSP and PC
        options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly

     To set options on the solvers separately for each block, call PCBJacobiGetSubKSP()
         and set the options directly on the resulting KSP object (you can access its PC
         with KSPGetPC())
602 
603    Level: beginner
604 
605    Concepts: block Jacobi
606 
607    Developer Notes: This preconditioner does not currently work with CUDA/CUSP for a couple of reasons.
608        (1) It creates seq vectors as work vectors that should be cusp
609        (2) The use of VecPlaceArray() is not handled properly by CUSP (that is it will not know where
610            the ownership of the vector is so may use wrong values) even if it did know the ownership
611            it may induce extra copy ups and downs. Satish suggests a VecTransplantArray() to handle two
612            vectors sharing the same pointer and handling the CUSP side as well instead of VecGetArray()/VecPlaceArray().
613 
614 
615 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
616            PCASM, PCBJacobiSetUseTrueLocal(), PCBJacobiGetSubKSP(), PCBJacobiSetTotalBlocks(),
617            PCBJacobiSetLocalBlocks(), PCSetModifySubmatrices()
618 M*/
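
/*
   Example (a sketch): run-time configuration of PCBJACOBI and its per-block
   solvers through the options database; ./app stands for any application
   that calls KSPSetFromOptions():

     mpiexec -n 4 ./app -ksp_type gmres -pc_type bjacobi \
         -pc_bjacobi_blocks 8 -sub_ksp_type preonly -sub_pc_type ilu

   Each of the eight blocks is then solved with a single ILU application.
*/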
619 
620 EXTERN_C_BEGIN
621 #undef __FUNCT__
622 #define __FUNCT__ "PCCreate_BJacobi"
623 PetscErrorCode  PCCreate_BJacobi(PC pc)
624 {
625   PetscErrorCode ierr;
626   PetscMPIInt    rank;
627   PC_BJacobi     *jac;
628 
629   PetscFunctionBegin;
630   ierr = PetscNewLog(pc,PC_BJacobi,&jac);CHKERRQ(ierr);
631   ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
632   pc->ops->apply              = 0;
633   pc->ops->applytranspose     = 0;
634   pc->ops->setup              = PCSetUp_BJacobi;
635   pc->ops->destroy            = PCDestroy_BJacobi;
636   pc->ops->setfromoptions     = PCSetFromOptions_BJacobi;
637   pc->ops->view               = PCView_BJacobi;
638   pc->ops->applyrichardson    = 0;
639 
640   pc->data               = (void*)jac;
641   jac->n                 = -1;
642   jac->n_local           = -1;
643   jac->first_local       = rank;
644   jac->ksp               = 0;
645   jac->use_true_local    = PETSC_FALSE;
646   jac->same_local_solves = PETSC_TRUE;
647   jac->g_lens            = 0;
648   jac->l_lens            = 0;
649   jac->psubcomm          = 0;
650 
651   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCBJacobiSetUseTrueLocal_C",
652                     "PCBJacobiSetUseTrueLocal_BJacobi",
653                     PCBJacobiSetUseTrueLocal_BJacobi);CHKERRQ(ierr);
654   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCBJacobiGetSubKSP_C","PCBJacobiGetSubKSP_BJacobi",
655                     PCBJacobiGetSubKSP_BJacobi);CHKERRQ(ierr);
656   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCBJacobiSetTotalBlocks_C","PCBJacobiSetTotalBlocks_BJacobi",
657                     PCBJacobiSetTotalBlocks_BJacobi);CHKERRQ(ierr);
658   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCBJacobiGetTotalBlocks_C","PCBJacobiGetTotalBlocks_BJacobi",
659                     PCBJacobiGetTotalBlocks_BJacobi);CHKERRQ(ierr);
660   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCBJacobiSetLocalBlocks_C","PCBJacobiSetLocalBlocks_BJacobi",
661                     PCBJacobiSetLocalBlocks_BJacobi);CHKERRQ(ierr);
662   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCBJacobiGetLocalBlocks_C","PCBJacobiGetLocalBlocks_BJacobi",
663                     PCBJacobiGetLocalBlocks_BJacobi);CHKERRQ(ierr);
664 
665   PetscFunctionReturn(0);
666 }
667 EXTERN_C_END
668 
669 /* --------------------------------------------------------------------------------------------*/
670 /*
671         These are for a single block per processor; works for AIJ, BAIJ; Seq and MPI
672 */
673 #undef __FUNCT__
674 #define __FUNCT__ "PCReset_BJacobi_Singleblock"
675 PetscErrorCode PCReset_BJacobi_Singleblock(PC pc)
676 {
677   PC_BJacobi             *jac = (PC_BJacobi*)pc->data;
678   PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
679   PetscErrorCode         ierr;
680 
681   PetscFunctionBegin;
682   ierr = KSPReset(jac->ksp[0]);CHKERRQ(ierr);
683   ierr = VecDestroy(&bjac->x);CHKERRQ(ierr);
684   ierr = VecDestroy(&bjac->y);CHKERRQ(ierr);
685   PetscFunctionReturn(0);
686 }
687 
688 #undef __FUNCT__
689 #define __FUNCT__ "PCDestroy_BJacobi_Singleblock"
690 PetscErrorCode PCDestroy_BJacobi_Singleblock(PC pc)
691 {
692   PC_BJacobi             *jac = (PC_BJacobi*)pc->data;
693   PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
694   PetscErrorCode         ierr;
695 
696   PetscFunctionBegin;
697   ierr = PCReset_BJacobi_Singleblock(pc);CHKERRQ(ierr);
698   ierr = KSPDestroy(&jac->ksp[0]);CHKERRQ(ierr);
699   ierr = PetscFree(jac->ksp);CHKERRQ(ierr);
700   ierr = PetscFree(jac->l_lens);CHKERRQ(ierr);
701   ierr = PetscFree(jac->g_lens);CHKERRQ(ierr);
702   ierr = PetscFree(bjac);CHKERRQ(ierr);
703   ierr = PetscFree(pc->data);CHKERRQ(ierr);
704   PetscFunctionReturn(0);
705 }
706 
707 #undef __FUNCT__
708 #define __FUNCT__ "PCSetUpOnBlocks_BJacobi_Singleblock"
709 PetscErrorCode PCSetUpOnBlocks_BJacobi_Singleblock(PC pc)
710 {
711   PetscErrorCode ierr;
712   PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
713 
714   PetscFunctionBegin;
715   ierr = KSPSetUp(jac->ksp[0]);CHKERRQ(ierr);
716   PetscFunctionReturn(0);
717 }
718 
719 #undef __FUNCT__
720 #define __FUNCT__ "PCApply_BJacobi_Singleblock"
721 PetscErrorCode PCApply_BJacobi_Singleblock(PC pc,Vec x,Vec y)
722 {
723   PetscErrorCode         ierr;
724   PC_BJacobi             *jac = (PC_BJacobi*)pc->data;
725   PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
726   PetscScalar            *x_array,*y_array;
727 
728   PetscFunctionBegin;
729   /*
730       The VecPlaceArray() is to avoid having to copy the
731     y vector into the bjac->x vector. The reason for
732     the bjac->x vector is that we need a sequential vector
733     for the sequential solve.
734   */
  ierr = VecGetArray(x,&x_array);CHKERRQ(ierr);
  ierr = VecGetArray(y,&y_array);CHKERRQ(ierr);
  ierr = VecPlaceArray(bjac->x,x_array);CHKERRQ(ierr);
  ierr = VecPlaceArray(bjac->y,y_array);CHKERRQ(ierr);

  ierr = KSPSolve(jac->ksp[0],bjac->x,bjac->y);CHKERRQ(ierr);

  ierr = VecResetArray(bjac->x);CHKERRQ(ierr);
  ierr = VecResetArray(bjac->y);CHKERRQ(ierr);
  ierr = VecRestoreArray(x,&x_array);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&y_array);CHKERRQ(ierr);
751   PetscFunctionReturn(0);
752 }
753 
754 #undef __FUNCT__
755 #define __FUNCT__ "PCApplySymmetricLeft_BJacobi_Singleblock"
756 PetscErrorCode PCApplySymmetricLeft_BJacobi_Singleblock(PC pc,Vec x,Vec y)
757 {
758   PetscErrorCode         ierr;
759   PC_BJacobi             *jac = (PC_BJacobi*)pc->data;
760   PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
761   PetscScalar            *x_array,*y_array;
762   PC                     subpc;
763 
764   PetscFunctionBegin;
765   /*
766       The VecPlaceArray() is to avoid having to copy the
767     y vector into the bjac->x vector. The reason for
768     the bjac->x vector is that we need a sequential vector
769     for the sequential solve.
770   */
  ierr = VecGetArray(x,&x_array);CHKERRQ(ierr);
  ierr = VecGetArray(y,&y_array);CHKERRQ(ierr);
  ierr = VecPlaceArray(bjac->x,x_array);CHKERRQ(ierr);
  ierr = VecPlaceArray(bjac->y,y_array);CHKERRQ(ierr);

  /* apply the symmetric left portion of the inner PC operator */
  /* note this by-passes the inner KSP and its options completely */

  ierr = KSPGetPC(jac->ksp[0],&subpc);CHKERRQ(ierr);
  ierr = PCApplySymmetricLeft(subpc,bjac->x,bjac->y);CHKERRQ(ierr);

  ierr = VecResetArray(bjac->x);CHKERRQ(ierr);
  ierr = VecResetArray(bjac->y);CHKERRQ(ierr);
  ierr = VecRestoreArray(x,&x_array);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&y_array);CHKERRQ(ierr);
792   PetscFunctionReturn(0);
793 }
794 
795 #undef __FUNCT__
796 #define __FUNCT__ "PCApplySymmetricRight_BJacobi_Singleblock"
797 PetscErrorCode PCApplySymmetricRight_BJacobi_Singleblock(PC pc,Vec x,Vec y)
798 {
799   PetscErrorCode         ierr;
800   PC_BJacobi             *jac = (PC_BJacobi*)pc->data;
801   PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
802   PetscScalar            *x_array,*y_array;
803   PC                     subpc;
804 
805   PetscFunctionBegin;
806   /*
807       The VecPlaceArray() is to avoid having to copy the
808     y vector into the bjac->x vector. The reason for
809     the bjac->x vector is that we need a sequential vector
810     for the sequential solve.
811   */
812   ierr = VecGetArray(x,&x_array);CHKERRQ(ierr);
813   ierr = VecGetArray(y,&y_array);CHKERRQ(ierr);
814   ierr = VecPlaceArray(bjac->x,x_array);CHKERRQ(ierr);
815   ierr = VecPlaceArray(bjac->y,y_array);CHKERRQ(ierr);
816 
817   /* apply the symmetric right portion of the inner PC operator */
818   /* note this by-passes the inner KSP and its options completely */
819 
820   ierr = KSPGetPC(jac->ksp[0],&subpc);CHKERRQ(ierr);
821   ierr = PCApplySymmetricRight(subpc,bjac->x,bjac->y);CHKERRQ(ierr);
822 
823   ierr = VecRestoreArray(x,&x_array);CHKERRQ(ierr);
824   ierr = VecRestoreArray(y,&y_array);CHKERRQ(ierr);
825   PetscFunctionReturn(0);
826 }
827 
828 #undef __FUNCT__
829 #define __FUNCT__ "PCApplyTranspose_BJacobi_Singleblock"
830 PetscErrorCode PCApplyTranspose_BJacobi_Singleblock(PC pc,Vec x,Vec y)
831 {
832   PetscErrorCode         ierr;
833   PC_BJacobi             *jac = (PC_BJacobi*)pc->data;
834   PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
835   PetscScalar            *x_array,*y_array;
836 
837   PetscFunctionBegin;
838   /*
839       The VecPlaceArray() is to avoid having to copy the
840     y vector into the bjac->x vector. The reason for
841     the bjac->x vector is that we need a sequential vector
842     for the sequential solve.
843   */
844   ierr = VecGetArray(x,&x_array);CHKERRQ(ierr);
845   ierr = VecGetArray(y,&y_array);CHKERRQ(ierr);
846   ierr = VecPlaceArray(bjac->x,x_array);CHKERRQ(ierr);
847   ierr = VecPlaceArray(bjac->y,y_array);CHKERRQ(ierr);
848   ierr = KSPSolveTranspose(jac->ksp[0],bjac->x,bjac->y);CHKERRQ(ierr);
849   ierr = VecResetArray(bjac->x);CHKERRQ(ierr);
850   ierr = VecResetArray(bjac->y);CHKERRQ(ierr);
851   ierr = VecRestoreArray(x,&x_array);CHKERRQ(ierr);
852   ierr = VecRestoreArray(y,&y_array);CHKERRQ(ierr);
853   PetscFunctionReturn(0);
854 }
855 
856 #undef __FUNCT__
857 #define __FUNCT__ "PCSetUp_BJacobi_Singleblock"
858 static PetscErrorCode PCSetUp_BJacobi_Singleblock(PC pc,Mat mat,Mat pmat)
859 {
860   PC_BJacobi             *jac = (PC_BJacobi*)pc->data;
861   PetscErrorCode         ierr;
862   PetscInt               m;
863   KSP                    ksp;
864   PC_BJacobi_Singleblock *bjac;
865   PetscBool              wasSetup = PETSC_TRUE;
866 
867   PetscFunctionBegin;
868 
869   if (!pc->setupcalled) {
870     const char *prefix;
871 
872     if (!jac->ksp) {
873       wasSetup = PETSC_FALSE;
874       ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
875       ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
876       ierr = PetscLogObjectParent(pc,ksp);CHKERRQ(ierr);
877       ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
878       ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
879       ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
880       ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
881 
882       pc->ops->reset               = PCReset_BJacobi_Singleblock;
883       pc->ops->destroy             = PCDestroy_BJacobi_Singleblock;
884       pc->ops->apply               = PCApply_BJacobi_Singleblock;
885       pc->ops->applysymmetricleft  = PCApplySymmetricLeft_BJacobi_Singleblock;
886       pc->ops->applysymmetricright = PCApplySymmetricRight_BJacobi_Singleblock;
887       pc->ops->applytranspose      = PCApplyTranspose_BJacobi_Singleblock;
888       pc->ops->setuponblocks       = PCSetUpOnBlocks_BJacobi_Singleblock;
889 
890       ierr = PetscMalloc(sizeof(KSP),&jac->ksp);CHKERRQ(ierr);
891       jac->ksp[0] = ksp;
892 
893       ierr = PetscNewLog(pc,PC_BJacobi_Singleblock,&bjac);CHKERRQ(ierr);
894       jac->data    = (void*)bjac;
895     } else {
896       ksp  = jac->ksp[0];
897       bjac = (PC_BJacobi_Singleblock *)jac->data;
898     }
899 
900     /*
901       The reason we need to generate these vectors is to serve
902       as the right-hand side and solution vector for the solve on the
903       block. We do not need to allocate space for the vectors since
904       that is provided via VecPlaceArray() just before the call to
905       KSPSolve() on the block.
906     */
907     ierr = MatGetSize(pmat,&m,&m);CHKERRQ(ierr);
908     ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,m,PETSC_NULL,&bjac->x);CHKERRQ(ierr);
909     ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,m,PETSC_NULL,&bjac->y);CHKERRQ(ierr);
910     ierr = PetscLogObjectParent(pc,bjac->x);CHKERRQ(ierr);
911     ierr = PetscLogObjectParent(pc,bjac->y);CHKERRQ(ierr);
912   } else {
913     ksp = jac->ksp[0];
914     bjac = (PC_BJacobi_Singleblock *)jac->data;
915   }
916   if (jac->use_true_local) {
917     ierr = KSPSetOperators(ksp,mat,pmat,pc->flag);CHKERRQ(ierr);
  } else {
919     ierr = KSPSetOperators(ksp,pmat,pmat,pc->flag);CHKERRQ(ierr);
920   }
921   if (!wasSetup && pc->setfromoptionscalled) {
922     ierr = KSPSetFromOptions(ksp);CHKERRQ(ierr);
923   }
924   PetscFunctionReturn(0);
925 }
926 
927 /* ---------------------------------------------------------------------------------------------*/
928 #undef __FUNCT__
929 #define __FUNCT__ "PCReset_BJacobi_Multiblock"
930 PetscErrorCode PCReset_BJacobi_Multiblock(PC pc)
931 {
932   PC_BJacobi            *jac = (PC_BJacobi*)pc->data;
933   PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
934   PetscErrorCode        ierr;
935   PetscInt              i;
936 
937   PetscFunctionBegin;
938   if (bjac && bjac->pmat) {
939     ierr = MatDestroyMatrices(jac->n_local,&bjac->pmat);CHKERRQ(ierr);
940     if (jac->use_true_local) {
941       ierr = MatDestroyMatrices(jac->n_local,&bjac->mat);CHKERRQ(ierr);
942     }
943   }
944 
945   for (i=0; i<jac->n_local; i++) {
946     ierr = KSPReset(jac->ksp[i]);CHKERRQ(ierr);
947     if (bjac && bjac->x) {
948       ierr = VecDestroy(&bjac->x[i]);CHKERRQ(ierr);
949       ierr = VecDestroy(&bjac->y[i]);CHKERRQ(ierr);
950       ierr = ISDestroy(&bjac->is[i]);CHKERRQ(ierr);
951     }
952   }
953   ierr = PetscFree(jac->l_lens);CHKERRQ(ierr);
954   ierr = PetscFree(jac->g_lens);CHKERRQ(ierr);
955   PetscFunctionReturn(0);
956 }
957 
958 #undef __FUNCT__
959 #define __FUNCT__ "PCDestroy_BJacobi_Multiblock"
960 PetscErrorCode PCDestroy_BJacobi_Multiblock(PC pc)
961 {
962   PC_BJacobi            *jac = (PC_BJacobi*)pc->data;
963   PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
964   PetscErrorCode        ierr;
965   PetscInt              i;
966 
967   PetscFunctionBegin;
968   ierr = PCReset_BJacobi_Multiblock(pc);CHKERRQ(ierr);
969   if (bjac) {
970     ierr = PetscFree2(bjac->x,bjac->y);CHKERRQ(ierr);
971     ierr = PetscFree(bjac->starts);CHKERRQ(ierr);
972     ierr = PetscFree(bjac->is);CHKERRQ(ierr);
973   }
974   ierr = PetscFree(jac->data);CHKERRQ(ierr);
975   for (i=0; i<jac->n_local; i++) {
976     ierr = KSPDestroy(&jac->ksp[i]);CHKERRQ(ierr);
977   }
978   ierr = PetscFree(jac->ksp);CHKERRQ(ierr);
979   ierr = PetscFree(pc->data);CHKERRQ(ierr);
980   PetscFunctionReturn(0);
981 }
982 
983 #undef __FUNCT__
984 #define __FUNCT__ "PCSetUpOnBlocks_BJacobi_Multiblock"
985 PetscErrorCode PCSetUpOnBlocks_BJacobi_Multiblock(PC pc)
986 {
987   PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
988   PetscErrorCode ierr;
989   PetscInt       i,n_local = jac->n_local;
990 
991   PetscFunctionBegin;
992   for (i=0; i<n_local; i++) {
993     ierr = KSPSetUp(jac->ksp[i]);CHKERRQ(ierr);
994   }
995   PetscFunctionReturn(0);
996 }
997 
998 /*
999       Preconditioner for block Jacobi
1000 */
1001 #undef __FUNCT__
1002 #define __FUNCT__ "PCApply_BJacobi_Multiblock"
1003 PetscErrorCode PCApply_BJacobi_Multiblock(PC pc,Vec x,Vec y)
1004 {
1005   PC_BJacobi            *jac = (PC_BJacobi*)pc->data;
1006   PetscErrorCode        ierr;
1007   PetscInt              i,n_local = jac->n_local;
1008   PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
1009   PetscScalar           *xin,*yin;
1010 
1011   PetscFunctionBegin;
1012   ierr = VecGetArray(x,&xin);CHKERRQ(ierr);
1013   ierr = VecGetArray(y,&yin);CHKERRQ(ierr);
1014   for (i=0; i<n_local; i++) {
1015     /*
1016        To avoid copying the subvector from x into a workspace we instead
1017        make the workspace vector array point to the subpart of the array of
1018        the global vector.
1019     */
1020     ierr = VecPlaceArray(bjac->x[i],xin+bjac->starts[i]);CHKERRQ(ierr);
1021     ierr = VecPlaceArray(bjac->y[i],yin+bjac->starts[i]);CHKERRQ(ierr);
1022 
1023     ierr = PetscLogEventBegin(PC_ApplyOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr);
1024     ierr = KSPSolve(jac->ksp[i],bjac->x[i],bjac->y[i]);CHKERRQ(ierr);
1025     ierr = PetscLogEventEnd(PC_ApplyOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr);
1026 
1027     ierr = VecResetArray(bjac->x[i]);CHKERRQ(ierr);
1028     ierr = VecResetArray(bjac->y[i]);CHKERRQ(ierr);
1029   }
1030   ierr = VecRestoreArray(x,&xin);CHKERRQ(ierr);
1031   ierr = VecRestoreArray(y,&yin);CHKERRQ(ierr);
1032   PetscFunctionReturn(0);
1033 }
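
/*
   The place/reset idiom used above, reduced to its essentials (a sketch;
   here wx and wy are work vectors created with VecCreateSeqWithArray(),
   xin and yin are host arrays obtained from VecGetArray() on the global
   vectors, and offset is the block's starting index):

     ierr = VecPlaceArray(wx,xin+offset);CHKERRQ(ierr);   borrow the subarrays, no copies
     ierr = VecPlaceArray(wy,yin+offset);CHKERRQ(ierr);
     ierr = KSPSolve(ksp,wx,wy);CHKERRQ(ierr);            use them like any other Vec
     ierr = VecResetArray(wx);CHKERRQ(ierr);              hand the arrays back
     ierr = VecResetArray(wy);CHKERRQ(ierr);
*/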
1034 
1035 /*
1036       Preconditioner for block Jacobi
1037 */
1038 #undef __FUNCT__
1039 #define __FUNCT__ "PCApplyTranspose_BJacobi_Multiblock"
1040 PetscErrorCode PCApplyTranspose_BJacobi_Multiblock(PC pc,Vec x,Vec y)
1041 {
1042   PC_BJacobi            *jac = (PC_BJacobi*)pc->data;
1043   PetscErrorCode        ierr;
1044   PetscInt              i,n_local = jac->n_local;
1045   PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
1046   PetscScalar           *xin,*yin;
1047 
1048   PetscFunctionBegin;
1049   ierr = VecGetArray(x,&xin);CHKERRQ(ierr);
1050   ierr = VecGetArray(y,&yin);CHKERRQ(ierr);
1051   for (i=0; i<n_local; i++) {
1052     /*
1053        To avoid copying the subvector from x into a workspace we instead
1054        make the workspace vector array point to the subpart of the array of
1055        the global vector.
1056     */
1057     ierr = VecPlaceArray(bjac->x[i],xin+bjac->starts[i]);CHKERRQ(ierr);
1058     ierr = VecPlaceArray(bjac->y[i],yin+bjac->starts[i]);CHKERRQ(ierr);
1059 
1060     ierr = PetscLogEventBegin(PC_ApplyTransposeOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr);
1061     ierr = KSPSolveTranspose(jac->ksp[i],bjac->x[i],bjac->y[i]);CHKERRQ(ierr);
1062     ierr = PetscLogEventEnd(PC_ApplyTransposeOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr);
1063 
1064     ierr = VecResetArray(bjac->x[i]);CHKERRQ(ierr);
1065     ierr = VecResetArray(bjac->y[i]);CHKERRQ(ierr);
1066   }
1067   ierr = VecRestoreArray(x,&xin);CHKERRQ(ierr);
1068   ierr = VecRestoreArray(y,&yin);CHKERRQ(ierr);
1069   PetscFunctionReturn(0);
1070 }
1071 
1072 #undef __FUNCT__
1073 #define __FUNCT__ "PCSetUp_BJacobi_Multiblock"
1074 static PetscErrorCode PCSetUp_BJacobi_Multiblock(PC pc,Mat mat,Mat pmat)
1075 {
1076   PC_BJacobi             *jac = (PC_BJacobi*)pc->data;
1077   PetscErrorCode         ierr;
1078   PetscInt               m,n_local,N,M,start,i;
1079   const char             *prefix,*pprefix,*mprefix;
1080   KSP                    ksp;
1081   Vec                    x,y;
1082   PC_BJacobi_Multiblock  *bjac = (PC_BJacobi_Multiblock*)jac->data;
1083   PC                     subpc;
1084   IS                     is;
1085   MatReuse               scall;
1086 
1087   PetscFunctionBegin;
1088   ierr = MatGetLocalSize(pc->pmat,&M,&N);CHKERRQ(ierr);
1089 
1090   n_local = jac->n_local;
1091 
1092   if (jac->use_true_local) {
1093     PetscBool  same;
1094     ierr = PetscObjectTypeCompare((PetscObject)mat,((PetscObject)pmat)->type_name,&same);CHKERRQ(ierr);
1095     if (!same) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_INCOMP,"Matrices not of same type");
1096   }
1097 
1098   if (!pc->setupcalled) {
1099     scall                  = MAT_INITIAL_MATRIX;
1100 
1101     if (!jac->ksp) {
1102       pc->ops->reset         = PCReset_BJacobi_Multiblock;
1103       pc->ops->destroy       = PCDestroy_BJacobi_Multiblock;
1104       pc->ops->apply         = PCApply_BJacobi_Multiblock;
1105       pc->ops->applytranspose= PCApplyTranspose_BJacobi_Multiblock;
1106       pc->ops->setuponblocks = PCSetUpOnBlocks_BJacobi_Multiblock;
1107 
1108       ierr = PetscNewLog(pc,PC_BJacobi_Multiblock,&bjac);CHKERRQ(ierr);
1109       ierr = PetscMalloc(n_local*sizeof(KSP),&jac->ksp);CHKERRQ(ierr);
      ierr = PetscLogObjectMemory(pc,n_local*sizeof(KSP));CHKERRQ(ierr);
      ierr = PetscMalloc2(n_local,Vec,&bjac->x,n_local,Vec,&bjac->y);CHKERRQ(ierr);
      ierr = PetscMalloc(n_local*sizeof(PetscInt),&bjac->starts);CHKERRQ(ierr);
      ierr = PetscLogObjectMemory(pc,n_local*sizeof(PetscInt));CHKERRQ(ierr);
1114 
1115       jac->data    = (void*)bjac;
1116       ierr = PetscMalloc(n_local*sizeof(IS),&bjac->is);CHKERRQ(ierr);
      ierr = PetscLogObjectMemory(pc,n_local*sizeof(IS));CHKERRQ(ierr);
1118 
1119       for (i=0; i<n_local; i++) {
1120         ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
1121         ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
1122         ierr = PetscLogObjectParent(pc,ksp);CHKERRQ(ierr);
1123         ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
1124         ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
1125         ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
1126         ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
1127         ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
1128         jac->ksp[i]    = ksp;
1129       }
1130     } else {
1131       bjac = (PC_BJacobi_Multiblock*)jac->data;
1132     }
1133 
1134     start = 0;
1135     for (i=0; i<n_local; i++) {
1136       m = jac->l_lens[i];
1137       /*
1138       The reason we need to generate these vectors is to serve
1139       as the right-hand side and solution vector for the solve on the
1140       block. We do not need to allocate space for the vectors since
1141       that is provided via VecPlaceArray() just before the call to
1142       KSPSolve() on the block.
1143 
1144       */
      ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,m,PETSC_NULL,&x);CHKERRQ(ierr);
1146       ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,m,PETSC_NULL,&y);CHKERRQ(ierr);
1147       ierr = PetscLogObjectParent(pc,x);CHKERRQ(ierr);
1148       ierr = PetscLogObjectParent(pc,y);CHKERRQ(ierr);
1149       bjac->x[i]      = x;
1150       bjac->y[i]      = y;
1151       bjac->starts[i] = start;
1152 
1153       ierr = ISCreateStride(PETSC_COMM_SELF,m,start,1,&is);CHKERRQ(ierr);
1154       bjac->is[i] = is;
1155       ierr = PetscLogObjectParent(pc,is);CHKERRQ(ierr);
1156 
1157       start += m;
1158     }
1159   } else {
1160     bjac = (PC_BJacobi_Multiblock*)jac->data;
1161     /*
1162        Destroy the blocks from the previous iteration
1163     */
1164     if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
1165       ierr = MatDestroyMatrices(n_local,&bjac->pmat);CHKERRQ(ierr);
1166       if (jac->use_true_local) {
1167         ierr = MatDestroyMatrices(n_local,&bjac->mat);CHKERRQ(ierr);
1168       }
1169       scall = MAT_INITIAL_MATRIX;
1170     } else {
1171       scall = MAT_REUSE_MATRIX;
1172     }
1173   }
1174 
1175   ierr = MatGetSubMatrices(pmat,n_local,bjac->is,bjac->is,scall,&bjac->pmat);CHKERRQ(ierr);
1176   if (jac->use_true_local) {
1177     ierr = PetscObjectGetOptionsPrefix((PetscObject)mat,&mprefix);CHKERRQ(ierr);
1178     ierr = MatGetSubMatrices(mat,n_local,bjac->is,bjac->is,scall,&bjac->mat);CHKERRQ(ierr);
1179   }
1180   /* Return control to the user so that the submatrices can be modified (e.g., to apply
1181      different boundary conditions for the submatrices than for the global problem) */
1182   ierr = PCModifySubMatrices(pc,n_local,bjac->is,bjac->is,bjac->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);
1183 
1184   ierr = PetscObjectGetOptionsPrefix((PetscObject)pmat,&pprefix);CHKERRQ(ierr);
1185   for (i=0; i<n_local; i++) {
1186     ierr = PetscLogObjectParent(pc,bjac->pmat[i]);CHKERRQ(ierr);
1187     ierr = PetscObjectSetOptionsPrefix((PetscObject)bjac->pmat[i],pprefix);CHKERRQ(ierr);
1188     if (jac->use_true_local) {
1189       ierr = PetscLogObjectParent(pc,bjac->mat[i]);CHKERRQ(ierr);
1190       ierr = PetscObjectSetOptionsPrefix((PetscObject)bjac->mat[i],mprefix);CHKERRQ(ierr);
1191       ierr = KSPSetOperators(jac->ksp[i],bjac->mat[i],bjac->pmat[i],pc->flag);CHKERRQ(ierr);
1192     } else {
1193       ierr = KSPSetOperators(jac->ksp[i],bjac->pmat[i],bjac->pmat[i],pc->flag);CHKERRQ(ierr);
1194     }
    if (pc->setfromoptionscalled) {
1196       ierr = KSPSetFromOptions(jac->ksp[i]);CHKERRQ(ierr);
1197     }
1198   }
1199   PetscFunctionReturn(0);
1200 }
1201 
1202 /* ---------------------------------------------------------------------------------------------*/
1203 /*
      These are for a single block spanning multiple processes (one block per subcommunicator)
1205 */
1206 #undef __FUNCT__
1207 #define __FUNCT__ "PCReset_BJacobi_Multiproc"
1208 static PetscErrorCode PCReset_BJacobi_Multiproc(PC pc)
1209 {
1210   PC_BJacobi           *jac = (PC_BJacobi*)pc->data;
1211   PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
1212   PetscErrorCode       ierr;
1213 
1214   PetscFunctionBegin;
1215   ierr = VecDestroy(&mpjac->ysub);CHKERRQ(ierr);
1216   ierr = VecDestroy(&mpjac->xsub);CHKERRQ(ierr);
1217   ierr = MatDestroy(&mpjac->submats);CHKERRQ(ierr);
1218   if (jac->ksp){ierr = KSPReset(jac->ksp[0]);CHKERRQ(ierr);}
1219   PetscFunctionReturn(0);
1220 }
1221 
1222 #undef __FUNCT__
1223 #define __FUNCT__ "PCDestroy_BJacobi_Multiproc"
1224 static PetscErrorCode PCDestroy_BJacobi_Multiproc(PC pc)
1225 {
1226   PC_BJacobi           *jac = (PC_BJacobi*)pc->data;
1227   PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
1228   PetscErrorCode       ierr;
1229 
1230   PetscFunctionBegin;
1231   ierr = PCReset_BJacobi_Multiproc(pc);CHKERRQ(ierr);
1232   ierr = KSPDestroy(&jac->ksp[0]);CHKERRQ(ierr);
1233   ierr = PetscFree(jac->ksp);CHKERRQ(ierr);
1234   ierr = PetscSubcommDestroy(&mpjac->psubcomm);CHKERRQ(ierr);
1235 
1236   ierr = PetscFree(mpjac);CHKERRQ(ierr);
1237   ierr = PetscFree(pc->data);CHKERRQ(ierr);
1238   PetscFunctionReturn(0);
1239 }
1240 
1241 #undef __FUNCT__
1242 #define __FUNCT__ "PCApply_BJacobi_Multiproc"
1243 static PetscErrorCode PCApply_BJacobi_Multiproc(PC pc,Vec x,Vec y)
1244 {
1245   PC_BJacobi           *jac = (PC_BJacobi*)pc->data;
1246   PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
1247   PetscErrorCode       ierr;
1248   PetscScalar          *xarray,*yarray;
1249 
1250   PetscFunctionBegin;
1251   /* place x's and y's local arrays into xsub and ysub */
1252   ierr = VecGetArray(x,&xarray);CHKERRQ(ierr);
1253   ierr = VecGetArray(y,&yarray);CHKERRQ(ierr);
1254   ierr = VecPlaceArray(mpjac->xsub,xarray);CHKERRQ(ierr);
1255   ierr = VecPlaceArray(mpjac->ysub,yarray);CHKERRQ(ierr);
1256 
1257   /* apply preconditioner on each matrix block */
1258   ierr = PetscLogEventBegin(PC_ApplyOnMproc,jac->ksp[0],mpjac->xsub,mpjac->ysub,0);CHKERRQ(ierr);
1259   ierr = KSPSolve(jac->ksp[0],mpjac->xsub,mpjac->ysub);CHKERRQ(ierr);
1260   ierr = PetscLogEventEnd(PC_ApplyOnMproc,jac->ksp[0],mpjac->xsub,mpjac->ysub,0);CHKERRQ(ierr);
1261 
1262   ierr = VecResetArray(mpjac->xsub);CHKERRQ(ierr);
1263   ierr = VecResetArray(mpjac->ysub);CHKERRQ(ierr);
1264   ierr = VecRestoreArray(x,&xarray);CHKERRQ(ierr);
1265   ierr = VecRestoreArray(y,&yarray);CHKERRQ(ierr);
1266   PetscFunctionReturn(0);
1267 }
1268 
1269 extern PetscErrorCode MatGetMultiProcBlock_MPIAIJ(Mat,MPI_Comm,MatReuse,Mat*);
1270 #undef __FUNCT__
1271 #define __FUNCT__ "PCSetUp_BJacobi_Multiproc"
1272 static PetscErrorCode PCSetUp_BJacobi_Multiproc(PC pc)
1273 {
1274   PC_BJacobi           *jac = (PC_BJacobi*)pc->data;
1275   PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
1276   PetscErrorCode       ierr;
1277   PetscInt             m,n;
1278   MPI_Comm             comm = ((PetscObject)pc)->comm,subcomm=0;
1279   const char           *prefix;
1280   PetscBool            wasSetup = PETSC_TRUE;
1281 
1282   PetscFunctionBegin;
1283   if (jac->n_local > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Only a single block in a subcommunicator is supported");
1284   jac->n_local = 1; /* currently only a single block is supported for a subcommunicator */
1285   if (!pc->setupcalled) {
1286     wasSetup = PETSC_FALSE;
1287     ierr = PetscNewLog(pc,PC_BJacobi_Multiproc,&mpjac);CHKERRQ(ierr);
1288     jac->data = (void*)mpjac;
1289 
1290     /* initialize datastructure mpjac */
1291     if (!jac->psubcomm) {
      /* Create default contiguous subcommunicators if the user does not provide them */
1293       ierr = PetscSubcommCreate(comm,&jac->psubcomm);CHKERRQ(ierr);
1294       ierr = PetscSubcommSetNumber(jac->psubcomm,jac->n);CHKERRQ(ierr);
1295       ierr = PetscSubcommSetType(jac->psubcomm,PETSC_SUBCOMM_CONTIGUOUS);CHKERRQ(ierr);
1296       ierr = PetscLogObjectMemory(pc,sizeof(PetscSubcomm));CHKERRQ(ierr);
1297     }
1298     mpjac->psubcomm = jac->psubcomm;
1299     subcomm         = mpjac->psubcomm->comm;
1300 
1301     /* Get matrix blocks of pmat */
1302     ierr = MatGetMultiProcBlock_MPIAIJ(pc->pmat,subcomm,MAT_INITIAL_MATRIX,&mpjac->submats);CHKERRQ(ierr);
1303 
1304     /* create a new PC that processors in each subcomm have copy of */
1305     ierr = PetscMalloc(sizeof(KSP),&jac->ksp);CHKERRQ(ierr);
1306     ierr = KSPCreate(subcomm,&jac->ksp[0]);CHKERRQ(ierr);
1307     ierr = PetscObjectIncrementTabLevel((PetscObject)jac->ksp[0],(PetscObject)pc,1);CHKERRQ(ierr);
1308     ierr = PetscLogObjectParent(pc,jac->ksp[0]);CHKERRQ(ierr);
1309     ierr = KSPSetOperators(jac->ksp[0],mpjac->submats,mpjac->submats,pc->flag);CHKERRQ(ierr);
1310     ierr = KSPGetPC(jac->ksp[0],&mpjac->pc);CHKERRQ(ierr);
1311 
1312     ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
1313     ierr = KSPSetOptionsPrefix(jac->ksp[0],prefix);CHKERRQ(ierr);
1314     ierr = KSPAppendOptionsPrefix(jac->ksp[0],"sub_");CHKERRQ(ierr);
1315     /*
1316       PetscMPIInt rank,subsize,subrank;
1317       ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
1318       ierr = MPI_Comm_size(subcomm,&subsize);CHKERRQ(ierr);
1319       ierr = MPI_Comm_rank(subcomm,&subrank);CHKERRQ(ierr);
1320 
1321       ierr = MatGetLocalSize(mpjac->submats,&m,PETSC_NULL);CHKERRQ(ierr);
1322       ierr = MatGetSize(mpjac->submats,&n,PETSC_NULL);CHKERRQ(ierr);
1323       ierr = PetscSynchronizedPrintf(comm,"[%d], sub-size %d,sub-rank %d\n",rank,subsize,subrank);
1324       ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
1325     */
1326 
1327     /* create dummy vectors xsub and ysub */
1328     ierr = MatGetLocalSize(mpjac->submats,&m,&n);CHKERRQ(ierr);
1329     ierr = VecCreateMPIWithArray(subcomm,1,n,PETSC_DECIDE,PETSC_NULL,&mpjac->xsub);CHKERRQ(ierr);
1330     ierr = VecCreateMPIWithArray(subcomm,1,m,PETSC_DECIDE,PETSC_NULL,&mpjac->ysub);CHKERRQ(ierr);
1331     ierr = PetscLogObjectParent(pc,mpjac->xsub);CHKERRQ(ierr);
1332     ierr = PetscLogObjectParent(pc,mpjac->ysub);CHKERRQ(ierr);
1333 
1334     pc->ops->reset   = PCReset_BJacobi_Multiproc;
1335     pc->ops->destroy = PCDestroy_BJacobi_Multiproc;
1336     pc->ops->apply   = PCApply_BJacobi_Multiproc;
1337   } else { /* pc->setupcalled */
1338     subcomm = mpjac->psubcomm->comm;
1339     if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
1340       /* destroy old matrix blocks, then get new matrix blocks */
1341       if (mpjac->submats){ierr = MatDestroy(&mpjac->submats);CHKERRQ(ierr);}
1342       ierr = MatGetMultiProcBlock_MPIAIJ(pc->pmat,subcomm,MAT_INITIAL_MATRIX,&mpjac->submats);CHKERRQ(ierr);
1343     } else {
1344       ierr = MatGetMultiProcBlock_MPIAIJ(pc->pmat,subcomm,MAT_REUSE_MATRIX,&mpjac->submats);CHKERRQ(ierr);
1345     }
1346     ierr = KSPSetOperators(jac->ksp[0],mpjac->submats,mpjac->submats,pc->flag);CHKERRQ(ierr);
1347   }
1348 
1349   if (!wasSetup && pc->setfromoptionscalled){
1350     ierr = KSPSetFromOptions(jac->ksp[0]);CHKERRQ(ierr);
1351   }
1352   PetscFunctionReturn(0);
1353 }
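
/*
   Example (a sketch): requesting fewer blocks than processes triggers the
   multiproc path above; e.g. on 8 processes, two blocks of 4 processes
   each, where each block is solved by a parallel GMRES on its
   subcommunicator (./app stands for any application that calls
   KSPSetFromOptions()):

     mpiexec -n 8 ./app -pc_type bjacobi -pc_bjacobi_blocks 2 \
         -sub_ksp_type gmres -sub_pc_type bjacobi
*/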
1354