xref: /petsc/src/ksp/pc/impls/bjacobi/bjacobi.c (revision ebc8f436579ae4907d6cb7dbf060cef5ed1ab4cb)
1 
2 /*
3    Defines a block Jacobi preconditioner.
4 */
5 #include <petsc-private/pcimpl.h>              /*I "petscpc.h" I*/
6 #include <../src/ksp/pc/impls/bjacobi/bjacobi.h>
7 
8 static PetscErrorCode PCSetUp_BJacobi_Singleblock(PC,Mat,Mat);
9 static PetscErrorCode PCSetUp_BJacobi_Multiblock(PC,Mat,Mat);
10 static PetscErrorCode PCSetUp_BJacobi_Multiproc(PC);
11 
12 #undef __FUNCT__
13 #define __FUNCT__ "PCSetUp_BJacobi"
14 static PetscErrorCode PCSetUp_BJacobi(PC pc)
15 {
16   PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
17   Mat            mat  = pc->mat,pmat = pc->pmat;
18   PetscErrorCode ierr,(*f)(Mat,Mat*);
19   PetscInt       N,M,start,i,sum,end;
20   PetscInt       bs,i_start=-1,i_end=-1;
21   PetscMPIInt    rank,size;
22   const char     *pprefix,*mprefix;
23 
24   PetscFunctionBegin;
25   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
26   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);
27   ierr = MatGetLocalSize(pc->pmat,&M,&N);CHKERRQ(ierr);
28   ierr = MatGetBlockSize(pc->pmat,&bs);CHKERRQ(ierr);
29 
30   if (jac->n > 0 && jac->n < size) {
31     ierr = PCSetUp_BJacobi_Multiproc(pc);CHKERRQ(ierr);
32     PetscFunctionReturn(0);
33   }
34 
35   /* --------------------------------------------------------------------------
36       Determines the number of blocks assigned to each processor
37   -----------------------------------------------------------------------------*/
38 
39   /*   local block count  given */
40   if (jac->n_local > 0 && jac->n < 0) {
41     ierr = MPI_Allreduce(&jac->n_local,&jac->n,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
42     if (jac->l_lens) { /* check that user set these correctly */
43       sum = 0;
44       for (i=0; i<jac->n_local; i++) {
45         if (jac->l_lens[i]/bs*bs !=jac->l_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat blocksize doesn't match block Jacobi layout");
46         sum += jac->l_lens[i];
47       }
48       if (sum != M) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local lens set incorrectly");
49     } else {
50       ierr = PetscMalloc1(jac->n_local,&jac->l_lens);CHKERRQ(ierr);
51       for (i=0; i<jac->n_local; i++) jac->l_lens[i] = bs*((M/bs)/jac->n_local + (((M/bs) % jac->n_local) > i));
52     }
53   } else if (jac->n > 0 && jac->n_local < 0) { /* global block count given */
54     /* global blocks given: determine which ones are local */
55     if (jac->g_lens) {
56         /* check that the g_lens entries are valid */
57       for (i=0; i<jac->n; i++) {
58         if (!jac->g_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Zero block not allowed");
59         if (jac->g_lens[i]/bs*bs != jac->g_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat blocksize doesn't match block Jacobi layout");
60       }
61       if (size == 1) {
62         jac->n_local = jac->n;
63         ierr         = PetscMalloc1(jac->n_local,&jac->l_lens);CHKERRQ(ierr);
64         ierr         = PetscMemcpy(jac->l_lens,jac->g_lens,jac->n_local*sizeof(PetscInt));CHKERRQ(ierr);
65         /* check that user set these correctly */
66         sum = 0;
67         for (i=0; i<jac->n_local; i++) sum += jac->l_lens[i];
68         if (sum != M) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Global lens set incorrectly");
69       } else {
70         ierr = MatGetOwnershipRange(pc->pmat,&start,&end);CHKERRQ(ierr);
71         /* loop over blocks determining the first one owned by me */
72         sum = 0;
73         for (i=0; i<jac->n+1; i++) {
74           if (sum == start) { i_start = i; goto start_1;}
75           if (i < jac->n) sum += jac->g_lens[i];
76         }
77         SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Block sizes used in PCBJacobiSetTotalBlocks()\nare not compatible with parallel matrix layout");
78 start_1:
79         for (i=i_start; i<jac->n+1; i++) {
80           if (sum == end) { i_end = i; goto end_1; }
81           if (i < jac->n) sum += jac->g_lens[i];
82         }
83         SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Block sizes used in PCBJacobiSetTotalBlocks()\nare not compatible with parallel matrix layout");
84 end_1:
85         jac->n_local = i_end - i_start;
86         ierr         = PetscMalloc1(jac->n_local,&jac->l_lens);CHKERRQ(ierr);
87         ierr         = PetscMemcpy(jac->l_lens,jac->g_lens+i_start,jac->n_local*sizeof(PetscInt));CHKERRQ(ierr);
88       }
89     } else { /* no global blocks given, determine them using the default layout */
90       jac->n_local = jac->n/size + ((jac->n % size) > rank);
91       ierr         = PetscMalloc1(jac->n_local,&jac->l_lens);CHKERRQ(ierr);
92       for (i=0; i<jac->n_local; i++) {
93         jac->l_lens[i] = ((M/bs)/jac->n_local + (((M/bs) % jac->n_local) > i))*bs;
94         if (!jac->l_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Too many blocks given");
95       }
96     }
97   } else if (jac->n < 0 && jac->n_local < 0) { /* no blocks given */
98     jac->n         = size;
99     jac->n_local   = 1;
100     ierr           = PetscMalloc(sizeof(PetscInt),&jac->l_lens);CHKERRQ(ierr);
101     jac->l_lens[0] = M;
102   } else { /* jac->n > 0 && jac->n_local > 0 */
103     if (!jac->l_lens) {
104       ierr = PetscMalloc1(jac->n_local,&jac->l_lens);CHKERRQ(ierr);
105       for (i=0; i<jac->n_local; i++) jac->l_lens[i] = bs*((M/bs)/jac->n_local + (((M/bs) % jac->n_local) > i));
106     }
107   }
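  /*
     A worked example of the default layout computed above (illustrative values only): with
     M = 10 local rows, bs = 1 and jac->n_local = 3, the formula
     l_lens[i] = bs*((M/bs)/n_local + (((M/bs) % n_local) > i)) gives block lengths 4,3,3,
     i.e. the remainder rows are assigned to the leading blocks.
  */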
108   if (jac->n_local < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Number of blocks is less than number of processors");
109 
110   /* -------------------------
111       Determines mat and pmat
112   ---------------------------*/
113   ierr = PetscObjectQueryFunction((PetscObject)pc->mat,"MatGetDiagonalBlock_C",&f);CHKERRQ(ierr);
114   if (!f && size == 1) {
115     mat  = pc->mat;
116     pmat = pc->pmat;
117   } else {
118     if (pc->useAmat) {
119       /* use block from Amat matrix, not Pmat for local MatMult() */
120       ierr = MatGetDiagonalBlock(pc->mat,&mat);CHKERRQ(ierr);
121       /* make submatrix have same prefix as entire matrix */
122       ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->mat,&mprefix);CHKERRQ(ierr);
123       ierr = PetscObjectSetOptionsPrefix((PetscObject)mat,mprefix);CHKERRQ(ierr);
124     }
125     if (pc->pmat != pc->mat || !pc->useAmat) {
126       ierr = MatGetDiagonalBlock(pc->pmat,&pmat);CHKERRQ(ierr);
127       /* make submatrix have same prefix as entire matrix */
128       ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
129       ierr = PetscObjectSetOptionsPrefix((PetscObject)pmat,pprefix);CHKERRQ(ierr);
130     } else pmat = mat;
131   }
132 
133   /* ------
134      Setup code depends on the number of blocks
135   */
136   if (jac->n_local == 1) {
137     ierr = PCSetUp_BJacobi_Singleblock(pc,mat,pmat);CHKERRQ(ierr);
138   } else {
139     ierr = PCSetUp_BJacobi_Multiblock(pc,mat,pmat);CHKERRQ(ierr);
140   }
141   PetscFunctionReturn(0);
142 }
143 
144 /* Default destroy, if it has never been set up */
145 #undef __FUNCT__
146 #define __FUNCT__ "PCDestroy_BJacobi"
147 static PetscErrorCode PCDestroy_BJacobi(PC pc)
148 {
149   PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
150   PetscErrorCode ierr;
151 
152   PetscFunctionBegin;
153   ierr = PetscFree(jac->g_lens);CHKERRQ(ierr);
154   ierr = PetscFree(jac->l_lens);CHKERRQ(ierr);
155   ierr = PetscFree(pc->data);CHKERRQ(ierr);
156   PetscFunctionReturn(0);
157 }
158 
159 #undef __FUNCT__
160 #define __FUNCT__ "PCSetFromOptions_BJacobi"
161 
162 static PetscErrorCode PCSetFromOptions_BJacobi(PC pc)
163 {
164   PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
165   PetscErrorCode ierr;
166   PetscInt       blocks,i;
167   PetscBool      flg;
168 
169   PetscFunctionBegin;
170   ierr = PetscOptionsHead("Block Jacobi options");CHKERRQ(ierr);
171   ierr = PetscOptionsInt("-pc_bjacobi_blocks","Total number of blocks","PCBJacobiSetTotalBlocks",jac->n,&blocks,&flg);CHKERRQ(ierr);
172   if (flg) {
173     ierr = PCBJacobiSetTotalBlocks(pc,blocks,NULL);CHKERRQ(ierr);
174   }
175   if (jac->ksp) {
176     /* The sub-KSPs have already been created (e.g., by PCSetUp_BJacobi_Singleblock), but KSPSetFromOptions()
177      * has not been called on them unless this routine was called before. */
178     for (i=0; i<jac->n_local; i++) {
179       ierr = KSPSetFromOptions(jac->ksp[i]);CHKERRQ(ierr);
180     }
181   }
182   ierr = PetscOptionsTail();CHKERRQ(ierr);
183   PetscFunctionReturn(0);
184 }
185 
186 #include <petscdraw.h>
187 #undef __FUNCT__
188 #define __FUNCT__ "PCView_BJacobi"
189 static PetscErrorCode PCView_BJacobi(PC pc,PetscViewer viewer)
190 {
191   PC_BJacobi           *jac   = (PC_BJacobi*)pc->data;
192   PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
193   PetscErrorCode       ierr;
194   PetscMPIInt          rank;
195   PetscInt             i;
196   PetscBool            iascii,isstring,isdraw;
197   PetscViewer          sviewer;
198 
199   PetscFunctionBegin;
200   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
201   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
202   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr);
203   if (iascii) {
204     if (pc->useAmat) {
205       ierr = PetscViewerASCIIPrintf(viewer,"  block Jacobi: using Amat local matrix, number of blocks = %D\n",jac->n);CHKERRQ(ierr);
206     }
207     ierr = PetscViewerASCIIPrintf(viewer,"  block Jacobi: number of blocks = %D\n",jac->n);CHKERRQ(ierr);
208     ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
209     if (jac->same_local_solves) {
210       ierr = PetscViewerASCIIPrintf(viewer,"  Local solve is same for all blocks, in the following KSP and PC objects:\n");CHKERRQ(ierr);
211       if (jac->ksp && !jac->psubcomm) {
212         ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
213         if (!rank) {
214           ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
215           ierr = KSPView(jac->ksp[0],sviewer);CHKERRQ(ierr);
216           ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
217         }
218         ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
219       } else if (jac->psubcomm && !jac->psubcomm->color) {
220         ierr = PetscViewerASCIIGetStdout(mpjac->psubcomm->comm,&sviewer);CHKERRQ(ierr);
221         ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
222         ierr = KSPView(*(jac->ksp),sviewer);CHKERRQ(ierr);
223         ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
224       }
225     } else {
226       PetscInt n_global;
227       ierr = MPI_Allreduce(&jac->n_local,&n_global,1,MPIU_INT,MPI_MAX,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
228       ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
229       ierr = PetscViewerASCIIPrintf(viewer,"  Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr);
230       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] number of local blocks = %D, first local block number = %D\n",
231                                                 rank,jac->n_local,jac->first_local);CHKERRQ(ierr);
232       ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
233       ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
234       for (i=0; i<jac->n_local; i++) {
235         ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] local block number %D\n",rank,i);CHKERRQ(ierr);
236         ierr = KSPView(jac->ksp[i],sviewer);CHKERRQ(ierr);
237         ierr = PetscViewerASCIISynchronizedPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
238       }
239       ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
240       ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
241       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
242       ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
243     }
244   } else if (isstring) {
245     ierr = PetscViewerStringSPrintf(viewer," blks=%D",jac->n);CHKERRQ(ierr);
246     ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
247     if (jac->ksp) {ierr = KSPView(jac->ksp[0],sviewer);CHKERRQ(ierr);}
248     ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
249   } else if (isdraw) {
250     PetscDraw draw;
251     char      str[25];
252     PetscReal x,y,bottom,h;
253 
254     ierr   = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr);
255     ierr   = PetscDrawGetCurrentPoint(draw,&x,&y);CHKERRQ(ierr);
256     ierr   = PetscSNPrintf(str,25,"Number blocks %D",jac->n);CHKERRQ(ierr);
257     ierr   = PetscDrawBoxedString(draw,x,y,PETSC_DRAW_RED,PETSC_DRAW_BLACK,str,NULL,&h);CHKERRQ(ierr);
258     bottom = y - h;
259     ierr   = PetscDrawPushCurrentPoint(draw,x,bottom);CHKERRQ(ierr);
260     /* warning: the communicator on the viewer is different than the one on the ksp in parallel */
261     if (jac->ksp) {ierr = KSPView(jac->ksp[0],viewer);CHKERRQ(ierr);}
262     ierr = PetscDrawPopCurrentPoint(draw);CHKERRQ(ierr);
263   }
264   PetscFunctionReturn(0);
265 }
266 
267 /* -------------------------------------------------------------------------------------*/
268 
269 #undef __FUNCT__
270 #define __FUNCT__ "PCBJacobiGetSubKSP_BJacobi"
271 static PetscErrorCode  PCBJacobiGetSubKSP_BJacobi(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
272 {
273   PC_BJacobi *jac = (PC_BJacobi*)pc->data;
274 
275   PetscFunctionBegin;
276   if (!pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Must call KSPSetUp() or PCSetUp() first");
277 
278   if (n_local) *n_local = jac->n_local;
279   if (first_local) *first_local = jac->first_local;
280   *ksp                   = jac->ksp;
281   jac->same_local_solves = PETSC_FALSE;        /* Assume that local solves are now different;
282                                                   not necessarily true though!  This flag is
283                                                   used only for PCView_BJacobi() */
284   PetscFunctionReturn(0);
285 }
286 
287 #undef __FUNCT__
288 #define __FUNCT__ "PCBJacobiSetTotalBlocks_BJacobi"
289 static PetscErrorCode  PCBJacobiSetTotalBlocks_BJacobi(PC pc,PetscInt blocks,PetscInt *lens)
290 {
291   PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
292   PetscErrorCode ierr;
293 
294   PetscFunctionBegin;
295   if (pc->setupcalled > 0 && jac->n!=blocks) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ORDER,"Cannot alter number of blocks after PCSetUp()/KSPSetUp() has been called");
296   jac->n = blocks;
297   if (!lens) jac->g_lens = 0;
298   else {
299     ierr = PetscMalloc1(blocks,&jac->g_lens);CHKERRQ(ierr);
300     ierr = PetscLogObjectMemory((PetscObject)pc,blocks*sizeof(PetscInt));CHKERRQ(ierr);
301     ierr = PetscMemcpy(jac->g_lens,lens,blocks*sizeof(PetscInt));CHKERRQ(ierr);
302   }
303   PetscFunctionReturn(0);
304 }
305 
306 #undef __FUNCT__
307 #define __FUNCT__ "PCBJacobiGetTotalBlocks_BJacobi"
308 static PetscErrorCode  PCBJacobiGetTotalBlocks_BJacobi(PC pc, PetscInt *blocks, const PetscInt *lens[])
309 {
310   PC_BJacobi *jac = (PC_BJacobi*) pc->data;
311 
312   PetscFunctionBegin;
313   *blocks = jac->n;
314   if (lens) *lens = jac->g_lens;
315   PetscFunctionReturn(0);
316 }
317 
318 #undef __FUNCT__
319 #define __FUNCT__ "PCBJacobiSetLocalBlocks_BJacobi"
320 static PetscErrorCode  PCBJacobiSetLocalBlocks_BJacobi(PC pc,PetscInt blocks,const PetscInt lens[])
321 {
322   PC_BJacobi     *jac;
323   PetscErrorCode ierr;
324 
325   PetscFunctionBegin;
326   jac = (PC_BJacobi*)pc->data;
327 
328   jac->n_local = blocks;
329   if (!lens) jac->l_lens = 0;
330   else {
331     ierr = PetscMalloc1(blocks,&jac->l_lens);CHKERRQ(ierr);
332     ierr = PetscLogObjectMemory((PetscObject)pc,blocks*sizeof(PetscInt));CHKERRQ(ierr);
333     ierr = PetscMemcpy(jac->l_lens,lens,blocks*sizeof(PetscInt));CHKERRQ(ierr);
334   }
335   PetscFunctionReturn(0);
336 }
337 
338 #undef __FUNCT__
339 #define __FUNCT__ "PCBJacobiGetLocalBlocks_BJacobi"
340 static PetscErrorCode  PCBJacobiGetLocalBlocks_BJacobi(PC pc, PetscInt *blocks, const PetscInt *lens[])
341 {
342   PC_BJacobi *jac = (PC_BJacobi*) pc->data;
343 
344   PetscFunctionBegin;
345   *blocks = jac->n_local;
346   if (lens) *lens = jac->l_lens;
347   PetscFunctionReturn(0);
348 }
349 
350 /* -------------------------------------------------------------------------------------*/
351 
352 #undef __FUNCT__
353 #define __FUNCT__ "PCBJacobiGetSubKSP"
354 /*@C
355    PCBJacobiGetSubKSP - Gets the local KSP contexts for all blocks on
356    this processor.
357 
358    Not Collective
359 
360    Input Parameter:
361 .  pc - the preconditioner context
362 
363    Output Parameters:
364 +  n_local - the number of blocks on this processor, or NULL
365 .  first_local - the global number of the first block on this processor, or NULL
366 -  ksp - the array of KSP contexts
367 
368    Notes:
369    The array of KSP contexts returned by PCBJacobiGetSubKSP() must not be freed by the caller.
370 
371    Currently for some matrix implementations only 1 block per processor
372    is supported.
373 
374    You must call KSPSetUp() or PCSetUp() before calling PCBJacobiGetSubKSP().
375 
376    Fortran Usage: You must pass in a KSP array that is large enough to contain all the local KSPs.
377       You can call PCBJacobiGetSubKSP(pc,nlocal,firstlocal,NULL_OBJECT,ierr) to determine how large the
378       KSP array must be.
379 
380    Level: advanced
381 
382 .keywords:  block, Jacobi, get, sub, KSP, context
383 
384 .seealso: PCASMGetSubKSP(), PCBJacobiSetTotalBlocks(), PCBJacobiSetLocalBlocks()
385 @*/
386 PetscErrorCode  PCBJacobiGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
387 {
388   PetscErrorCode ierr;
389 
390   PetscFunctionBegin;
391   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
392   ierr = PetscUseMethod(pc,"PCBJacobiGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));CHKERRQ(ierr);
393   PetscFunctionReturn(0);
394 }
395 
396 #undef __FUNCT__
397 #define __FUNCT__ "PCBJacobiSetTotalBlocks"
398 /*@
399    PCBJacobiSetTotalBlocks - Sets the global number of blocks for the block
400    Jacobi preconditioner.
401 
402    Collective on PC
403 
404    Input Parameters:
405 +  pc - the preconditioner context
406 .  blocks - the number of blocks
407 -  lens - [optional] integer array containing the size of each block
408 
409    Options Database Key:
410 .  -pc_bjacobi_blocks <blocks> - Sets the number of global blocks
411 
412    Notes:
413    Currently only a limited number of blocking configurations are supported.
414    All processors sharing the PC must call this routine with the same data.
415 
416    Level: intermediate
417 
418 .keywords:  set, number, Jacobi, global, total, blocks
419 
420 .seealso: PCSetUseAmat(), PCBJacobiSetLocalBlocks()
421 @*/
422 PetscErrorCode  PCBJacobiSetTotalBlocks(PC pc,PetscInt blocks,const PetscInt lens[])
423 {
424   PetscErrorCode ierr;
425 
426   PetscFunctionBegin;
427   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
428   if (blocks <= 0) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Must have positive blocks");
429   ierr = PetscTryMethod(pc,"PCBJacobiSetTotalBlocks_C",(PC,PetscInt,const PetscInt[]),(pc,blocks,lens));CHKERRQ(ierr);
430   PetscFunctionReturn(0);
431 }
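/*
   A minimal usage sketch for PCBJacobiSetTotalBlocks(); it is not code from this file. The PC
   object pc and a 12x12 (global) matrix are assumed to exist already, and the block sizes below
   are illustrative only.

      PetscInt       lens[4] = {3,3,3,3};          the entries must sum to the global matrix size
      PetscErrorCode ierr;

      ierr = PCSetType(pc,PCBJACOBI);CHKERRQ(ierr);
      ierr = PCBJacobiSetTotalBlocks(pc,4,lens);CHKERRQ(ierr);

   Passing NULL for lens lets PCSetUp_BJacobi() choose (nearly) equal-sized blocks, which is what
   the -pc_bjacobi_blocks <blocks> option does.
*/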
432 
433 #undef __FUNCT__
434 #define __FUNCT__ "PCBJacobiGetTotalBlocks"
435 /*@C
436    PCBJacobiGetTotalBlocks - Gets the global number of blocks for the block
437    Jacobi preconditioner.
438 
439    Not Collective
440 
441    Input Parameter:
442 .  pc - the preconditioner context
443 
444    Output parameters:
445 +  blocks - the number of blocks
446 -  lens - integer array containing the size of each block
447 
448    Level: intermediate
449 
450 .keywords:  get, number, Jacobi, global, total, blocks
451 
452 .seealso: PCSetUseAmat(), PCBJacobiGetLocalBlocks()
453 @*/
454 PetscErrorCode  PCBJacobiGetTotalBlocks(PC pc, PetscInt *blocks, const PetscInt *lens[])
455 {
456   PetscErrorCode ierr;
457 
458   PetscFunctionBegin;
459   PetscValidHeaderSpecific(pc, PC_CLASSID,1);
460   PetscValidIntPointer(blocks,2);
461   ierr = PetscUseMethod(pc,"PCBJacobiGetTotalBlocks_C",(PC,PetscInt*, const PetscInt *[]),(pc,blocks,lens));CHKERRQ(ierr);
462   PetscFunctionReturn(0);
463 }
464 
465 #undef __FUNCT__
466 #define __FUNCT__ "PCBJacobiSetLocalBlocks"
467 /*@
468    PCBJacobiSetLocalBlocks - Sets the local number of blocks for the block
469    Jacobi preconditioner.
470 
471    Not Collective
472 
473    Input Parameters:
474 +  pc - the preconditioner context
475 .  blocks - the number of blocks
476 -  lens - [optional] integer array containing size of each block
477 
478    Note:
479    Currently only a limited number of blocking configurations are supported.
480 
481    Level: intermediate
482 
483 .keywords: PC, set, number, Jacobi, local, blocks
484 
485 .seealso: PCSetUseAmat(), PCBJacobiSetTotalBlocks()
486 @*/
487 PetscErrorCode  PCBJacobiSetLocalBlocks(PC pc,PetscInt blocks,const PetscInt lens[])
488 {
489   PetscErrorCode ierr;
490 
491   PetscFunctionBegin;
492   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
493   if (blocks < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must have nonnegative blocks");
494   ierr = PetscTryMethod(pc,"PCBJacobiSetLocalBlocks_C",(PC,PetscInt,const PetscInt []),(pc,blocks,lens));CHKERRQ(ierr);
495   PetscFunctionReturn(0);
496 }
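/*
   A sketch of a per-process layout with PCBJacobiSetLocalBlocks(); it is not code from this file.
   Unlike PCBJacobiSetTotalBlocks(), each rank may pass its own values; the local row count mlocal
   is assumed to be known and the split below is illustrative only.

      PetscInt       lens[2];
      PetscErrorCode ierr;

      lens[0] = mlocal/2;
      lens[1] = mlocal - mlocal/2;
      ierr    = PCBJacobiSetLocalBlocks(pc,2,lens);CHKERRQ(ierr);

   The lengths on each rank must add up to that rank's number of local rows (and, for a blocked
   matrix, be multiples of the block size, as checked in PCSetUp_BJacobi()).
*/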
497 
498 #undef __FUNCT__
499 #define __FUNCT__ "PCBJacobiGetLocalBlocks"
500 /*@C
501    PCBJacobiGetLocalBlocks - Gets the local number of blocks for the block
502    Jacobi preconditioner.
503 
504    Not Collective
505 
506    Input Parameter:
507 .  pc - the preconditioner context
508 
509    Output Parameters:
510 +  blocks - the number of blocks
511 -  lens - [optional] integer array containing the size of each block
510 
511    Note:
512    Currently only a limited number of blocking configurations are supported.
513 
514    Level: intermediate
515 
516 .keywords: PC, get, number, Jacobi, local, blocks
517 
518 .seealso: PCSetUseAmat(), PCBJacobiGetTotalBlocks()
519 @*/
520 PetscErrorCode  PCBJacobiGetLocalBlocks(PC pc, PetscInt *blocks, const PetscInt *lens[])
521 {
522   PetscErrorCode ierr;
523 
524   PetscFunctionBegin;
525   PetscValidHeaderSpecific(pc, PC_CLASSID,1);
526   PetscValidIntPointer(blocks,2);
527   ierr = PetscUseMethod(pc,"PCBJacobiGetLocalBlocks_C",(PC,PetscInt*, const PetscInt *[]),(pc,blocks,lens));CHKERRQ(ierr);
528   PetscFunctionReturn(0);
529 }
530 
531 /* -----------------------------------------------------------------------------------*/
532 
533 /*MC
534    PCBJACOBI - Use block Jacobi preconditioning, each block is (approximately) solved with
535            its own KSP object.
536 
537    Options Database Keys:
538 .  -pc_use_amat - use Amat to apply block of operator in inner Krylov method
539 
540    Notes: Each processor can have one or more blocks, but a block cannot be shared by more
541      than one processor. Defaults to one block per processor.
542 
543      To set options on the solvers for all blocks, prefix the KSP and PC
544         options database keys with -sub_. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
545 
546      To set the options on the solvers separately for each block, call PCBJacobiGetSubKSP()
547          and set the options directly on the resulting KSP objects (you can access their PCs
548          with KSPGetPC()); a usage sketch follows this manual page.
549 
550    Level: beginner
551 
552    Concepts: block Jacobi
553 
554    Developer Notes: This preconditioner does not currently work with CUDA/CUSP for two reasons.
555        (1) It creates sequential work vectors that should instead be CUSP vectors.
556        (2) The use of VecPlaceArray() is not handled properly by CUSP (it does not know where
557            ownership of the vector data lies, so it may use wrong values); even if it did know the
558            ownership, it could induce extra copies between host and device. Satish suggests a VecTransplantArray()
559            that handles two vectors sharing the same pointer (including the CUSP side) instead of VecGetArray()/VecPlaceArray().
560 
561 
562 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
563            PCASM, PCSetUseAmat(), PCGetUseAmat(), PCBJacobiGetSubKSP(), PCBJacobiSetTotalBlocks(),
564            PCBJacobiSetLocalBlocks(), PCSetModifySubmatrices()
565 M*/
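/*
   A minimal sketch of the typical PCBJACOBI workflow described in the manual page above. It is an
   illustration, not code from this file: the parallel matrix A and the vectors b and x are assumed
   to exist already, and the per-block tolerance is arbitrary.

      KSP            ksp,*subksp;
      PC             pc;
      PetscInt       i,nlocal,first;
      PetscErrorCode ierr;

      ierr = KSPCreate(PETSC_COMM_WORLD,&ksp);CHKERRQ(ierr);
      ierr = KSPSetOperators(ksp,A,A);CHKERRQ(ierr);
      ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
      ierr = PCSetType(pc,PCBJACOBI);CHKERRQ(ierr);
      ierr = KSPSetFromOptions(ksp);CHKERRQ(ierr);
      ierr = KSPSetUp(ksp);CHKERRQ(ierr);                    KSPSetUp() must precede PCBJacobiGetSubKSP()
      ierr = PCBJacobiGetSubKSP(pc,&nlocal,&first,&subksp);CHKERRQ(ierr);
      for (i=0; i<nlocal; i++) {
        ierr = KSPSetTolerances(subksp[i],1.e-6,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT);CHKERRQ(ierr);
      }
      ierr = KSPSolve(ksp,b,x);CHKERRQ(ierr);

   Alternatively the block solvers can be configured entirely from the options database, e.g.
   -pc_type bjacobi -pc_bjacobi_blocks 4 -sub_ksp_type preonly -sub_pc_type ilu
*/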
566 
567 #undef __FUNCT__
568 #define __FUNCT__ "PCCreate_BJacobi"
569 PETSC_EXTERN PetscErrorCode PCCreate_BJacobi(PC pc)
570 {
571   PetscErrorCode ierr;
572   PetscMPIInt    rank;
573   PC_BJacobi     *jac;
574 
575   PetscFunctionBegin;
576   ierr = PetscNewLog(pc,&jac);CHKERRQ(ierr);
577   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
578 
579   pc->ops->apply           = 0;
580   pc->ops->applytranspose  = 0;
581   pc->ops->setup           = PCSetUp_BJacobi;
582   pc->ops->destroy         = PCDestroy_BJacobi;
583   pc->ops->setfromoptions  = PCSetFromOptions_BJacobi;
584   pc->ops->view            = PCView_BJacobi;
585   pc->ops->applyrichardson = 0;
586 
587   pc->data               = (void*)jac;
588   jac->n                 = -1;
589   jac->n_local           = -1;
590   jac->first_local       = rank;
591   jac->ksp               = 0;
592   jac->same_local_solves = PETSC_TRUE;
593   jac->g_lens            = 0;
594   jac->l_lens            = 0;
595   jac->psubcomm          = 0;
596 
597   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiGetSubKSP_C",PCBJacobiGetSubKSP_BJacobi);CHKERRQ(ierr);
598   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiSetTotalBlocks_C",PCBJacobiSetTotalBlocks_BJacobi);CHKERRQ(ierr);
599   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiGetTotalBlocks_C",PCBJacobiGetTotalBlocks_BJacobi);CHKERRQ(ierr);
600   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiSetLocalBlocks_C",PCBJacobiSetLocalBlocks_BJacobi);CHKERRQ(ierr);
601   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiGetLocalBlocks_C",PCBJacobiGetLocalBlocks_BJacobi);CHKERRQ(ierr);
602   PetscFunctionReturn(0);
603 }
604 
605 /* --------------------------------------------------------------------------------------------*/
606 /*
607         These are for a single block per processor; they work for AIJ and BAIJ matrices, both Seq and MPI
608 */
609 #undef __FUNCT__
610 #define __FUNCT__ "PCReset_BJacobi_Singleblock"
611 PetscErrorCode PCReset_BJacobi_Singleblock(PC pc)
612 {
613   PC_BJacobi             *jac  = (PC_BJacobi*)pc->data;
614   PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
615   PetscErrorCode         ierr;
616 
617   PetscFunctionBegin;
618   ierr = KSPReset(jac->ksp[0]);CHKERRQ(ierr);
619   ierr = VecDestroy(&bjac->x);CHKERRQ(ierr);
620   ierr = VecDestroy(&bjac->y);CHKERRQ(ierr);
621   PetscFunctionReturn(0);
622 }
623 
624 #undef __FUNCT__
625 #define __FUNCT__ "PCDestroy_BJacobi_Singleblock"
626 PetscErrorCode PCDestroy_BJacobi_Singleblock(PC pc)
627 {
628   PC_BJacobi             *jac  = (PC_BJacobi*)pc->data;
629   PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
630   PetscErrorCode         ierr;
631 
632   PetscFunctionBegin;
633   ierr = PCReset_BJacobi_Singleblock(pc);CHKERRQ(ierr);
634   ierr = KSPDestroy(&jac->ksp[0]);CHKERRQ(ierr);
635   ierr = PetscFree(jac->ksp);CHKERRQ(ierr);
636   ierr = PetscFree(jac->l_lens);CHKERRQ(ierr);
637   ierr = PetscFree(jac->g_lens);CHKERRQ(ierr);
638   ierr = PetscFree(bjac);CHKERRQ(ierr);
639   ierr = PetscFree(pc->data);CHKERRQ(ierr);
640   PetscFunctionReturn(0);
641 }
642 
643 #undef __FUNCT__
644 #define __FUNCT__ "PCSetUpOnBlocks_BJacobi_Singleblock"
645 PetscErrorCode PCSetUpOnBlocks_BJacobi_Singleblock(PC pc)
646 {
647   PetscErrorCode ierr;
648   PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
649 
650   PetscFunctionBegin;
651   ierr = KSPSetUp(jac->ksp[0]);CHKERRQ(ierr);
652   PetscFunctionReturn(0);
653 }
654 
655 #undef __FUNCT__
656 #define __FUNCT__ "PCApply_BJacobi_Singleblock"
657 PetscErrorCode PCApply_BJacobi_Singleblock(PC pc,Vec x,Vec y)
658 {
659   PetscErrorCode         ierr;
660   PC_BJacobi             *jac  = (PC_BJacobi*)pc->data;
661   PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
663 
664   PetscFunctionBegin;
665   ierr = VecGetLocalVectorRead(x, &bjac->x);CHKERRQ(ierr);
666   ierr = VecGetLocalVector(y, &bjac->y);CHKERRQ(ierr);
667   ierr = KSPSolve(jac->ksp[0],bjac->x,bjac->y);CHKERRQ(ierr);
668   ierr = VecRestoreLocalVectorRead(x, &bjac->x);CHKERRQ(ierr);
669   ierr = VecRestoreLocalVector(y, &bjac->y);CHKERRQ(ierr);
670   PetscFunctionReturn(0);
671 }
672 
673 #undef __FUNCT__
674 #define __FUNCT__ "PCApplySymmetricLeft_BJacobi_Singleblock"
675 PetscErrorCode PCApplySymmetricLeft_BJacobi_Singleblock(PC pc,Vec x,Vec y)
676 {
677   PetscErrorCode         ierr;
678   PC_BJacobi             *jac  = (PC_BJacobi*)pc->data;
679   PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
680   PetscScalar            *x_array,*y_array;
681   PC                     subpc;
682 
683   PetscFunctionBegin;
684   /*
685       The VecPlaceArray() calls avoid having to copy the x and y vectors
686     into the work vectors bjac->x and bjac->y. The reason for the
687     work vectors is that we need sequential vectors
688     for the sequential solve.
689   */
690   ierr = VecGetArray(x,&x_array);CHKERRQ(ierr);
691   ierr = VecGetArray(y,&y_array);CHKERRQ(ierr);
692   ierr = VecPlaceArray(bjac->x,x_array);CHKERRQ(ierr);
693   ierr = VecPlaceArray(bjac->y,y_array);CHKERRQ(ierr);
694   /* apply the symmetric left portion of the inner PC operator */
695   /* note this by-passes the inner KSP and its options completely */
696   ierr = KSPGetPC(jac->ksp[0],&subpc);CHKERRQ(ierr);
697   ierr = PCApplySymmetricLeft(subpc,bjac->x,bjac->y);CHKERRQ(ierr);
698   ierr = VecResetArray(bjac->x);CHKERRQ(ierr);
699   ierr = VecResetArray(bjac->y);CHKERRQ(ierr);
700   ierr = VecRestoreArray(x,&x_array);CHKERRQ(ierr);
701   ierr = VecRestoreArray(y,&y_array);CHKERRQ(ierr);
702   PetscFunctionReturn(0);
703 }
704 
705 #undef __FUNCT__
706 #define __FUNCT__ "PCApplySymmetricRight_BJacobi_Singleblock"
707 PetscErrorCode PCApplySymmetricRight_BJacobi_Singleblock(PC pc,Vec x,Vec y)
708 {
709   PetscErrorCode         ierr;
710   PC_BJacobi             *jac  = (PC_BJacobi*)pc->data;
711   PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
712   PetscScalar            *x_array,*y_array;
713   PC                     subpc;
714 
715   PetscFunctionBegin;
716   /*
717       The VecPlaceArray() calls avoid having to copy the x and y vectors
718     into the work vectors bjac->x and bjac->y. The reason for the
719     work vectors is that we need sequential vectors
720     for the sequential solve.
721   */
722   ierr = VecGetArray(x,&x_array);CHKERRQ(ierr);
723   ierr = VecGetArray(y,&y_array);CHKERRQ(ierr);
724   ierr = VecPlaceArray(bjac->x,x_array);CHKERRQ(ierr);
725   ierr = VecPlaceArray(bjac->y,y_array);CHKERRQ(ierr);
726 
727   /* apply the symmetric right portion of the inner PC operator */
728   /* note this by-passes the inner KSP and its options completely */
729 
730   ierr = KSPGetPC(jac->ksp[0],&subpc);CHKERRQ(ierr);
731   ierr = PCApplySymmetricRight(subpc,bjac->x,bjac->y);CHKERRQ(ierr);
732 
733   ierr = VecResetArray(bjac->x);CHKERRQ(ierr);
734   ierr = VecResetArray(bjac->y);CHKERRQ(ierr);
735   ierr = VecRestoreArray(x,&x_array);CHKERRQ(ierr);
736   ierr = VecRestoreArray(y,&y_array);CHKERRQ(ierr);
735   PetscFunctionReturn(0);
736 }
737 
738 #undef __FUNCT__
739 #define __FUNCT__ "PCApplyTranspose_BJacobi_Singleblock"
740 PetscErrorCode PCApplyTranspose_BJacobi_Singleblock(PC pc,Vec x,Vec y)
741 {
742   PetscErrorCode         ierr;
743   PC_BJacobi             *jac  = (PC_BJacobi*)pc->data;
744   PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
745   PetscScalar            *x_array,*y_array;
746 
747   PetscFunctionBegin;
748   /*
749       The VecPlaceArray() calls avoid having to copy the x and y vectors
750     into the work vectors bjac->x and bjac->y. The reason for the
751     work vectors is that we need sequential vectors
752     for the sequential solve.
753   */
754   ierr = VecGetArray(x,&x_array);CHKERRQ(ierr);
755   ierr = VecGetArray(y,&y_array);CHKERRQ(ierr);
756   ierr = VecPlaceArray(bjac->x,x_array);CHKERRQ(ierr);
757   ierr = VecPlaceArray(bjac->y,y_array);CHKERRQ(ierr);
758   ierr = KSPSolveTranspose(jac->ksp[0],bjac->x,bjac->y);CHKERRQ(ierr);
759   ierr = VecResetArray(bjac->x);CHKERRQ(ierr);
760   ierr = VecResetArray(bjac->y);CHKERRQ(ierr);
761   ierr = VecRestoreArray(x,&x_array);CHKERRQ(ierr);
762   ierr = VecRestoreArray(y,&y_array);CHKERRQ(ierr);
763   PetscFunctionReturn(0);
764 }
765 
766 #undef __FUNCT__
767 #define __FUNCT__ "PCSetUp_BJacobi_Singleblock"
768 static PetscErrorCode PCSetUp_BJacobi_Singleblock(PC pc,Mat mat,Mat pmat)
769 {
770   PC_BJacobi             *jac = (PC_BJacobi*)pc->data;
771   PetscErrorCode         ierr;
772   PetscInt               m;
773   KSP                    ksp;
774   PC_BJacobi_Singleblock *bjac;
775   PetscBool              wasSetup = PETSC_TRUE;
776 
777   PetscFunctionBegin;
778   if (!pc->setupcalled) {
779     const char *prefix;
780 
781     if (!jac->ksp) {
782       wasSetup = PETSC_FALSE;
783 
784       ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
785       ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
786       ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)ksp);CHKERRQ(ierr);
787       ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
788       ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
789       ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
790       ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
791 
792       pc->ops->reset               = PCReset_BJacobi_Singleblock;
793       pc->ops->destroy             = PCDestroy_BJacobi_Singleblock;
794       pc->ops->apply               = PCApply_BJacobi_Singleblock;
795       pc->ops->applysymmetricleft  = PCApplySymmetricLeft_BJacobi_Singleblock;
796       pc->ops->applysymmetricright = PCApplySymmetricRight_BJacobi_Singleblock;
797       pc->ops->applytranspose      = PCApplyTranspose_BJacobi_Singleblock;
798       pc->ops->setuponblocks       = PCSetUpOnBlocks_BJacobi_Singleblock;
799 
800       ierr        = PetscMalloc(sizeof(KSP),&jac->ksp);CHKERRQ(ierr);
801       jac->ksp[0] = ksp;
802 
803       ierr      = PetscNewLog(pc,&bjac);CHKERRQ(ierr);
804       jac->data = (void*)bjac;
805     } else {
806       ksp  = jac->ksp[0];
807       bjac = (PC_BJacobi_Singleblock*)jac->data;
808     }
809 
810     /*
811       The reason we need to generate these vectors is to serve
812       as the right-hand side and solution vector for the solve on the
813       block. We do not need to allocate space for the vectors since
814       that is provided via VecPlaceArray() just before the call to
815       KSPSolve() on the block.
816     */
817     ierr = MatGetSize(pmat,&m,&m);CHKERRQ(ierr);
818     ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,m,NULL,&bjac->x);CHKERRQ(ierr);
819     ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,m,NULL,&bjac->y);CHKERRQ(ierr);
820 #ifdef PETSC_HAVE_CUSP
821     ierr = VecSetType(bjac->x,VECCUSP);CHKERRQ(ierr);
822     ierr = VecSetType(bjac->y,VECCUSP);CHKERRQ(ierr);
823 #endif
824     ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)bjac->x);CHKERRQ(ierr);
825     ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)bjac->y);CHKERRQ(ierr);
826   } else {
827     ksp  = jac->ksp[0];
828     bjac = (PC_BJacobi_Singleblock*)jac->data;
829   }
830   if (pc->useAmat) {
831     ierr = KSPSetOperators(ksp,mat,pmat);CHKERRQ(ierr);
832   } else {
833     ierr = KSPSetOperators(ksp,pmat,pmat);CHKERRQ(ierr);
834   }
835   if (!wasSetup && pc->setfromoptionscalled) {
836     /* If PCSetFromOptions_BJacobi is called later, KSPSetFromOptions will be called at that time. */
837     ierr = KSPSetFromOptions(ksp);CHKERRQ(ierr);
838   }
839   PetscFunctionReturn(0);
840 }
841 
842 /* ---------------------------------------------------------------------------------------------*/
843 #undef __FUNCT__
844 #define __FUNCT__ "PCReset_BJacobi_Multiblock"
845 PetscErrorCode PCReset_BJacobi_Multiblock(PC pc)
846 {
847   PC_BJacobi            *jac  = (PC_BJacobi*)pc->data;
848   PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
849   PetscErrorCode        ierr;
850   PetscInt              i;
851 
852   PetscFunctionBegin;
853   if (bjac && bjac->pmat) {
854     ierr = MatDestroyMatrices(jac->n_local,&bjac->pmat);CHKERRQ(ierr);
855     if (pc->useAmat) {
856       ierr = MatDestroyMatrices(jac->n_local,&bjac->mat);CHKERRQ(ierr);
857     }
858   }
859 
860   for (i=0; i<jac->n_local; i++) {
861     ierr = KSPReset(jac->ksp[i]);CHKERRQ(ierr);
862     if (bjac && bjac->x) {
863       ierr = VecDestroy(&bjac->x[i]);CHKERRQ(ierr);
864       ierr = VecDestroy(&bjac->y[i]);CHKERRQ(ierr);
865       ierr = ISDestroy(&bjac->is[i]);CHKERRQ(ierr);
866     }
867   }
868   ierr = PetscFree(jac->l_lens);CHKERRQ(ierr);
869   ierr = PetscFree(jac->g_lens);CHKERRQ(ierr);
870   PetscFunctionReturn(0);
871 }
872 
873 #undef __FUNCT__
874 #define __FUNCT__ "PCDestroy_BJacobi_Multiblock"
875 PetscErrorCode PCDestroy_BJacobi_Multiblock(PC pc)
876 {
877   PC_BJacobi            *jac  = (PC_BJacobi*)pc->data;
878   PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
879   PetscErrorCode        ierr;
880   PetscInt              i;
881 
882   PetscFunctionBegin;
883   ierr = PCReset_BJacobi_Multiblock(pc);CHKERRQ(ierr);
884   if (bjac) {
885     ierr = PetscFree2(bjac->x,bjac->y);CHKERRQ(ierr);
886     ierr = PetscFree(bjac->starts);CHKERRQ(ierr);
887     ierr = PetscFree(bjac->is);CHKERRQ(ierr);
888   }
889   ierr = PetscFree(jac->data);CHKERRQ(ierr);
890   for (i=0; i<jac->n_local; i++) {
891     ierr = KSPDestroy(&jac->ksp[i]);CHKERRQ(ierr);
892   }
893   ierr = PetscFree(jac->ksp);CHKERRQ(ierr);
894   ierr = PetscFree(pc->data);CHKERRQ(ierr);
895   PetscFunctionReturn(0);
896 }
897 
898 #undef __FUNCT__
899 #define __FUNCT__ "PCSetUpOnBlocks_BJacobi_Multiblock"
900 PetscErrorCode PCSetUpOnBlocks_BJacobi_Multiblock(PC pc)
901 {
902   PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
903   PetscErrorCode ierr;
904   PetscInt       i,n_local = jac->n_local;
905 
906   PetscFunctionBegin;
907   for (i=0; i<n_local; i++) {
908     ierr = KSPSetUp(jac->ksp[i]);CHKERRQ(ierr);
909   }
910   PetscFunctionReturn(0);
911 }
912 
913 /*
914       Preconditioner for block Jacobi
915 */
916 #undef __FUNCT__
917 #define __FUNCT__ "PCApply_BJacobi_Multiblock"
918 PetscErrorCode PCApply_BJacobi_Multiblock(PC pc,Vec x,Vec y)
919 {
920   PC_BJacobi            *jac = (PC_BJacobi*)pc->data;
921   PetscErrorCode        ierr;
922   PetscInt              i,n_local = jac->n_local;
923   PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
924   PetscScalar           *xin,*yin;
925 
926   PetscFunctionBegin;
927   ierr = VecGetArray(x,&xin);CHKERRQ(ierr);
928   ierr = VecGetArray(y,&yin);CHKERRQ(ierr);
929   for (i=0; i<n_local; i++) {
930     /*
931        To avoid copying the subvector from x into a workspace we instead
932        make the workspace vector array point to the subpart of the array of
933        the global vector.
934     */
935     ierr = VecPlaceArray(bjac->x[i],xin+bjac->starts[i]);CHKERRQ(ierr);
936     ierr = VecPlaceArray(bjac->y[i],yin+bjac->starts[i]);CHKERRQ(ierr);
937 
938     ierr = PetscLogEventBegin(PC_ApplyOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr);
939     ierr = KSPSolve(jac->ksp[i],bjac->x[i],bjac->y[i]);CHKERRQ(ierr);
940     ierr = PetscLogEventEnd(PC_ApplyOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr);
941 
942     ierr = VecResetArray(bjac->x[i]);CHKERRQ(ierr);
943     ierr = VecResetArray(bjac->y[i]);CHKERRQ(ierr);
944   }
945   ierr = VecRestoreArray(x,&xin);CHKERRQ(ierr);
946   ierr = VecRestoreArray(y,&yin);CHKERRQ(ierr);
947   PetscFunctionReturn(0);
948 }
949 
950 /*
951       Preconditioner for block Jacobi
952 */
953 #undef __FUNCT__
954 #define __FUNCT__ "PCApplyTranspose_BJacobi_Multiblock"
955 PetscErrorCode PCApplyTranspose_BJacobi_Multiblock(PC pc,Vec x,Vec y)
956 {
957   PC_BJacobi            *jac = (PC_BJacobi*)pc->data;
958   PetscErrorCode        ierr;
959   PetscInt              i,n_local = jac->n_local;
960   PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
961   PetscScalar           *xin,*yin;
962 
963   PetscFunctionBegin;
964   ierr = VecGetArray(x,&xin);CHKERRQ(ierr);
965   ierr = VecGetArray(y,&yin);CHKERRQ(ierr);
966   for (i=0; i<n_local; i++) {
967     /*
968        To avoid copying the subvector from x into a workspace we instead
969        make the workspace vector array point to the subpart of the array of
970        the global vector.
971     */
972     ierr = VecPlaceArray(bjac->x[i],xin+bjac->starts[i]);CHKERRQ(ierr);
973     ierr = VecPlaceArray(bjac->y[i],yin+bjac->starts[i]);CHKERRQ(ierr);
974 
975     ierr = PetscLogEventBegin(PC_ApplyTransposeOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr);
976     ierr = KSPSolveTranspose(jac->ksp[i],bjac->x[i],bjac->y[i]);CHKERRQ(ierr);
977     ierr = PetscLogEventEnd(PC_ApplyTransposeOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr);
978 
979     ierr = VecResetArray(bjac->x[i]);CHKERRQ(ierr);
980     ierr = VecResetArray(bjac->y[i]);CHKERRQ(ierr);
981   }
982   ierr = VecRestoreArray(x,&xin);CHKERRQ(ierr);
983   ierr = VecRestoreArray(y,&yin);CHKERRQ(ierr);
984   PetscFunctionReturn(0);
985 }
986 
987 #undef __FUNCT__
988 #define __FUNCT__ "PCSetUp_BJacobi_Multiblock"
989 static PetscErrorCode PCSetUp_BJacobi_Multiblock(PC pc,Mat mat,Mat pmat)
990 {
991   PC_BJacobi            *jac = (PC_BJacobi*)pc->data;
992   PetscErrorCode        ierr;
993   PetscInt              m,n_local,N,M,start,i;
994   const char            *prefix,*pprefix,*mprefix;
995   KSP                   ksp;
996   Vec                   x,y;
997   PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
998   PC                    subpc;
999   IS                    is;
1000   MatReuse              scall;
1001 
1002   PetscFunctionBegin;
1003   ierr = MatGetLocalSize(pc->pmat,&M,&N);CHKERRQ(ierr);
1004 
1005   n_local = jac->n_local;
1006 
1007   if (pc->useAmat) {
1008     PetscBool same;
1009     ierr = PetscObjectTypeCompare((PetscObject)mat,((PetscObject)pmat)->type_name,&same);CHKERRQ(ierr);
1010     if (!same) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_INCOMP,"Matrices not of same type");
1011   }
1012 
1013   if (!pc->setupcalled) {
1014     scall = MAT_INITIAL_MATRIX;
1015 
1016     if (!jac->ksp) {
1017       pc->ops->reset         = PCReset_BJacobi_Multiblock;
1018       pc->ops->destroy       = PCDestroy_BJacobi_Multiblock;
1019       pc->ops->apply         = PCApply_BJacobi_Multiblock;
1020       pc->ops->applytranspose= PCApplyTranspose_BJacobi_Multiblock;
1021       pc->ops->setuponblocks = PCSetUpOnBlocks_BJacobi_Multiblock;
1022 
1023       ierr = PetscNewLog(pc,&bjac);CHKERRQ(ierr);
1024       ierr = PetscMalloc1(n_local,&jac->ksp);CHKERRQ(ierr);
1025       ierr = PetscLogObjectMemory((PetscObject)pc,n_local*sizeof(KSP));CHKERRQ(ierr);
1026       ierr = PetscMalloc2(n_local,&bjac->x,n_local,&bjac->y);CHKERRQ(ierr);
1027       ierr = PetscMalloc1(n_local,&bjac->starts);CHKERRQ(ierr);
1028       ierr = PetscLogObjectMemory((PetscObject)pc,2*n_local*sizeof(Vec)+n_local*sizeof(PetscInt));CHKERRQ(ierr);
1029 
1030       jac->data = (void*)bjac;
1031       ierr      = PetscMalloc1(n_local,&bjac->is);CHKERRQ(ierr);
1032       ierr      = PetscLogObjectMemory((PetscObject)pc,n_local*sizeof(IS));CHKERRQ(ierr);
1033 
1034       for (i=0; i<n_local; i++) {
1035         ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
1036         ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
1037         ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)ksp);CHKERRQ(ierr);
1038         ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
1039         ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
1040         ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
1041         ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
1042         ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
1043 
1044         jac->ksp[i] = ksp;
1045       }
1046     } else {
1047       bjac = (PC_BJacobi_Multiblock*)jac->data;
1048     }
1049 
1050     start = 0;
1051     for (i=0; i<n_local; i++) {
1052       m = jac->l_lens[i];
1053       /*
1054       The reason we need to generate these vectors is to serve
1055       as the right-hand side and solution vector for the solve on the
1056       block. We do not need to allocate space for the vectors since
1057       that is provided via VecPlaceArray() just before the call to
1058       KSPSolve() on the block.
1059 
1060       */
1061       ierr = VecCreateSeq(PETSC_COMM_SELF,m,&x);CHKERRQ(ierr);
1062       ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,m,NULL,&y);CHKERRQ(ierr);
1063 #ifdef PETSC_HAVE_CUSP
1064       ierr = VecSetType(x,VECCUSP);CHKERRQ(ierr);
1065       ierr = VecSetType(y,VECCUSP);CHKERRQ(ierr);
1066 #endif
1067       ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)x);CHKERRQ(ierr);
1068       ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)y);CHKERRQ(ierr);
1069 
1070       bjac->x[i]      = x;
1071       bjac->y[i]      = y;
1072       bjac->starts[i] = start;
1073 
1074       ierr        = ISCreateStride(PETSC_COMM_SELF,m,start,1,&is);CHKERRQ(ierr);
1075       bjac->is[i] = is;
1076       ierr        = PetscLogObjectParent((PetscObject)pc,(PetscObject)is);CHKERRQ(ierr);
1077 
1078       start += m;
1079     }
1080   } else {
1081     bjac = (PC_BJacobi_Multiblock*)jac->data;
1082     /*
1083        Destroy the blocks from the previous iteration
1084     */
1085     if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
1086       ierr = MatDestroyMatrices(n_local,&bjac->pmat);CHKERRQ(ierr);
1087       if (pc->useAmat) {
1088         ierr = MatDestroyMatrices(n_local,&bjac->mat);CHKERRQ(ierr);
1089       }
1090       scall = MAT_INITIAL_MATRIX;
1091     } else scall = MAT_REUSE_MATRIX;
1092   }
1093 
1094   ierr = MatGetSubMatrices(pmat,n_local,bjac->is,bjac->is,scall,&bjac->pmat);CHKERRQ(ierr);
1095   if (pc->useAmat) {
1096     ierr = PetscObjectGetOptionsPrefix((PetscObject)mat,&mprefix);CHKERRQ(ierr);
1097     ierr = MatGetSubMatrices(mat,n_local,bjac->is,bjac->is,scall,&bjac->mat);CHKERRQ(ierr);
1098   }
1099   /* Return control to the user so that the submatrices can be modified (e.g., to apply
1100      different boundary conditions for the submatrices than for the global problem) */
1101   ierr = PCModifySubMatrices(pc,n_local,bjac->is,bjac->is,bjac->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);
1102 
1103   ierr = PetscObjectGetOptionsPrefix((PetscObject)pmat,&pprefix);CHKERRQ(ierr);
1104   for (i=0; i<n_local; i++) {
1105     ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)bjac->pmat[i]);CHKERRQ(ierr);
1106     ierr = PetscObjectSetOptionsPrefix((PetscObject)bjac->pmat[i],pprefix);CHKERRQ(ierr);
1107     if (pc->useAmat) {
1108       ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)bjac->mat[i]);CHKERRQ(ierr);
1109       ierr = PetscObjectSetOptionsPrefix((PetscObject)bjac->mat[i],mprefix);CHKERRQ(ierr);
1110       ierr = KSPSetOperators(jac->ksp[i],bjac->mat[i],bjac->pmat[i]);CHKERRQ(ierr);
1111     } else {
1112       ierr = KSPSetOperators(jac->ksp[i],bjac->pmat[i],bjac->pmat[i]);CHKERRQ(ierr);
1113     }
1114     if (pc->setfromoptionscalled) {
1115       ierr = KSPSetFromOptions(jac->ksp[i]);CHKERRQ(ierr);
1116     }
1117   }
1118   PetscFunctionReturn(0);
1119 }
1120 
1121 /* ---------------------------------------------------------------------------------------------*/
1122 /*
1123       These are for a single block spanning multiple processes; a usage sketch follows.
1124 */
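/*
   A sketch of when this code path is taken; the executable name and options are illustrative only:

      mpiexec -n 4 ./ex -ksp_type gmres -pc_type bjacobi -pc_bjacobi_blocks 2

   With 4 processes but only 2 blocks, PCSetUp_BJacobi() dispatches to PCSetUp_BJacobi_Multiproc()
   and each block is handled by a KSP living on a 2-process subcommunicator, configurable through
   the -sub_ options prefix.
*/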
1125 #undef __FUNCT__
1126 #define __FUNCT__ "PCReset_BJacobi_Multiproc"
1127 static PetscErrorCode PCReset_BJacobi_Multiproc(PC pc)
1128 {
1129   PC_BJacobi           *jac   = (PC_BJacobi*)pc->data;
1130   PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
1131   PetscErrorCode       ierr;
1132 
1133   PetscFunctionBegin;
1134   ierr = VecDestroy(&mpjac->ysub);CHKERRQ(ierr);
1135   ierr = VecDestroy(&mpjac->xsub);CHKERRQ(ierr);
1136   ierr = MatDestroy(&mpjac->submats);CHKERRQ(ierr);
1137   if (jac->ksp) {ierr = KSPReset(jac->ksp[0]);CHKERRQ(ierr);}
1138   PetscFunctionReturn(0);
1139 }
1140 
1141 #undef __FUNCT__
1142 #define __FUNCT__ "PCDestroy_BJacobi_Multiproc"
1143 static PetscErrorCode PCDestroy_BJacobi_Multiproc(PC pc)
1144 {
1145   PC_BJacobi           *jac   = (PC_BJacobi*)pc->data;
1146   PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
1147   PetscErrorCode       ierr;
1148 
1149   PetscFunctionBegin;
1150   ierr = PCReset_BJacobi_Multiproc(pc);CHKERRQ(ierr);
1151   ierr = KSPDestroy(&jac->ksp[0]);CHKERRQ(ierr);
1152   ierr = PetscFree(jac->ksp);CHKERRQ(ierr);
1153   ierr = PetscSubcommDestroy(&mpjac->psubcomm);CHKERRQ(ierr);
1154 
1155   ierr = PetscFree(mpjac);CHKERRQ(ierr);
1156   ierr = PetscFree(pc->data);CHKERRQ(ierr);
1157   PetscFunctionReturn(0);
1158 }
1159 
1160 #undef __FUNCT__
1161 #define __FUNCT__ "PCApply_BJacobi_Multiproc"
1162 static PetscErrorCode PCApply_BJacobi_Multiproc(PC pc,Vec x,Vec y)
1163 {
1164   PC_BJacobi           *jac   = (PC_BJacobi*)pc->data;
1165   PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
1166   PetscErrorCode       ierr;
1167   PetscScalar          *xarray,*yarray;
1168 
1169   PetscFunctionBegin;
1170   /* place x's and y's local arrays into xsub and ysub */
1171   ierr = VecGetArray(x,&xarray);CHKERRQ(ierr);
1172   ierr = VecGetArray(y,&yarray);CHKERRQ(ierr);
1173   ierr = VecPlaceArray(mpjac->xsub,xarray);CHKERRQ(ierr);
1174   ierr = VecPlaceArray(mpjac->ysub,yarray);CHKERRQ(ierr);
1175 
1176   /* apply preconditioner on each matrix block */
1177   ierr = PetscLogEventBegin(PC_ApplyOnMproc,jac->ksp[0],mpjac->xsub,mpjac->ysub,0);CHKERRQ(ierr);
1178   ierr = KSPSolve(jac->ksp[0],mpjac->xsub,mpjac->ysub);CHKERRQ(ierr);
1179   ierr = PetscLogEventEnd(PC_ApplyOnMproc,jac->ksp[0],mpjac->xsub,mpjac->ysub,0);CHKERRQ(ierr);
1180 
1181   ierr = VecResetArray(mpjac->xsub);CHKERRQ(ierr);
1182   ierr = VecResetArray(mpjac->ysub);CHKERRQ(ierr);
1183   ierr = VecRestoreArray(x,&xarray);CHKERRQ(ierr);
1184   ierr = VecRestoreArray(y,&yarray);CHKERRQ(ierr);
1185   PetscFunctionReturn(0);
1186 }
1187 
1188 #include <petsc-private/matimpl.h>
1189 #undef __FUNCT__
1190 #define __FUNCT__ "PCSetUp_BJacobi_Multiproc"
1191 static PetscErrorCode PCSetUp_BJacobi_Multiproc(PC pc)
1192 {
1193   PC_BJacobi           *jac   = (PC_BJacobi*)pc->data;
1194   PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
1195   PetscErrorCode       ierr;
1196   PetscInt             m,n;
1197   MPI_Comm             comm,subcomm=0;
1198   const char           *prefix;
1199   PetscBool            wasSetup = PETSC_TRUE;
1200 
1201   PetscFunctionBegin;
1202   ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
1203   if (jac->n_local > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Only a single block in a subcommunicator is supported");
1204   jac->n_local = 1; /* currently only a single block is supported for a subcommunicator */
1205   if (!pc->setupcalled) {
1206     wasSetup  = PETSC_FALSE;
1207     ierr      = PetscNewLog(pc,&mpjac);CHKERRQ(ierr);
1208     jac->data = (void*)mpjac;
1209 
1210     /* initialize datastructure mpjac */
1211     if (!jac->psubcomm) {
1212       /* Create default contiguous subcommunicators if the user does not provide them */
1213       ierr = PetscSubcommCreate(comm,&jac->psubcomm);CHKERRQ(ierr);
1214       ierr = PetscSubcommSetNumber(jac->psubcomm,jac->n);CHKERRQ(ierr);
1215       ierr = PetscSubcommSetType(jac->psubcomm,PETSC_SUBCOMM_CONTIGUOUS);CHKERRQ(ierr);
1216       ierr = PetscLogObjectMemory((PetscObject)pc,sizeof(PetscSubcomm));CHKERRQ(ierr);
1217     }
1218     mpjac->psubcomm = jac->psubcomm;
1219     subcomm         = mpjac->psubcomm->comm;
1220 
1221     /* Get matrix blocks of pmat */
1222     if (!pc->pmat->ops->getmultiprocblock) SETERRQ(PetscObjectComm((PetscObject)pc->pmat),PETSC_ERR_SUP,"No support for the requested operation");
1223     ierr = (*pc->pmat->ops->getmultiprocblock)(pc->pmat,subcomm,MAT_INITIAL_MATRIX,&mpjac->submats);CHKERRQ(ierr);
1224 
1225     /* create a new KSP (and PC) that the processes in each subcomm share */
1226     ierr = PetscMalloc(sizeof(KSP),&jac->ksp);CHKERRQ(ierr);
1227     ierr = KSPCreate(subcomm,&jac->ksp[0]);CHKERRQ(ierr);
1228     ierr = PetscObjectIncrementTabLevel((PetscObject)jac->ksp[0],(PetscObject)pc,1);CHKERRQ(ierr);
1229     ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)jac->ksp[0]);CHKERRQ(ierr);
1230     ierr = KSPSetOperators(jac->ksp[0],mpjac->submats,mpjac->submats);CHKERRQ(ierr);
1231     ierr = KSPGetPC(jac->ksp[0],&mpjac->pc);CHKERRQ(ierr);
1232 
1233     ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
1234     ierr = KSPSetOptionsPrefix(jac->ksp[0],prefix);CHKERRQ(ierr);
1235     ierr = KSPAppendOptionsPrefix(jac->ksp[0],"sub_");CHKERRQ(ierr);
1236     /*
1237       PetscMPIInt rank,subsize,subrank;
1238       ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
1239       ierr = MPI_Comm_size(subcomm,&subsize);CHKERRQ(ierr);
1240       ierr = MPI_Comm_rank(subcomm,&subrank);CHKERRQ(ierr);
1241 
1242       ierr = MatGetLocalSize(mpjac->submats,&m,NULL);CHKERRQ(ierr);
1243       ierr = MatGetSize(mpjac->submats,&n,NULL);CHKERRQ(ierr);
1244       ierr = PetscSynchronizedPrintf(comm,"[%d], sub-size %d,sub-rank %d\n",rank,subsize,subrank);
1245       ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
1246     */
1247 
1248     /* create dummy vectors xsub and ysub */
1249     ierr = MatGetLocalSize(mpjac->submats,&m,&n);CHKERRQ(ierr);
1250     ierr = VecCreateMPIWithArray(subcomm,1,n,PETSC_DECIDE,NULL,&mpjac->xsub);CHKERRQ(ierr);
1251     ierr = VecCreateMPIWithArray(subcomm,1,m,PETSC_DECIDE,NULL,&mpjac->ysub);CHKERRQ(ierr);
1252 #ifdef PETSC_HAVE_CUSP
1253     ierr = VecSetType(mpjac->xsub,VECMPICUSP);CHKERRQ(ierr);
1254     ierr = VecSetType(mpjac->ysub,VECMPICUSP);CHKERRQ(ierr);
1255 #endif
1256     ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)mpjac->xsub);CHKERRQ(ierr);
1257     ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)mpjac->ysub);CHKERRQ(ierr);
1258 
1259     pc->ops->reset   = PCReset_BJacobi_Multiproc;
1260     pc->ops->destroy = PCDestroy_BJacobi_Multiproc;
1261     pc->ops->apply   = PCApply_BJacobi_Multiproc;
1262   } else { /* pc->setupcalled */
1263     subcomm = mpjac->psubcomm->comm;
1264     if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
1265       /* destroy old matrix blocks, then get new matrix blocks */
1266       if (mpjac->submats) {ierr = MatDestroy(&mpjac->submats);CHKERRQ(ierr);}
1267       ierr = (*pc->pmat->ops->getmultiprocblock)(pc->pmat,subcomm,MAT_INITIAL_MATRIX,&mpjac->submats);CHKERRQ(ierr);
1268     } else {
1269       ierr = (*pc->pmat->ops->getmultiprocblock)(pc->pmat,subcomm,MAT_REUSE_MATRIX,&mpjac->submats);CHKERRQ(ierr);
1270     }
1271     ierr = KSPSetOperators(jac->ksp[0],mpjac->submats,mpjac->submats);CHKERRQ(ierr);
1272   }
1273 
1274   if (!wasSetup && pc->setfromoptionscalled) {
1275     ierr = KSPSetFromOptions(jac->ksp[0]);CHKERRQ(ierr);
1276   }
1277   PetscFunctionReturn(0);
1278 }
1279