/*
  This file defines an additive Schwarz preconditioner for any Mat implementation.

  Note that each processor may have any number of subdomains, but to deal
  easily with the VecScatter() we treat each processor as if it had the
  same number of subdomains.

       n - total number of true subdomains on all processors
       n_local_true - actual number of subdomains on this processor
       n_local - maximum over all processors of n_local_true
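
       Example: with three processes owning 2, 1, and 3 true subdomains,
       n = 6 and n_local = 3.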
*/
#include "src/ksp/pc/pcimpl.h"     /*I "petscpc.h" I*/

typedef struct {
  int        n,n_local,n_local_true;
  PetscTruth is_flg;              /* set to PETSC_TRUE if the IS was created in PCSetUp_ASM() */
  int        overlap;             /* overlap requested by user */
  KSP        *ksp;                /* linear solvers for each block */
  VecScatter *scat;               /* mapping to subregion */
  Vec        *x,*y;
  IS         *is;                 /* index set that defines each subdomain */
  Mat        *mat,*pmat;          /* mat is not currently used */
  PCASMType  type;                /* use reduced interpolation, restriction or both */
  PetscTruth type_set;            /* if user set this value (so won't change it for symmetric problems) */
  PetscTruth same_local_solves;   /* flag indicating whether all local solvers are the same */
  PetscTruth inplace;             /* indicates that the submatrices are deleted after
                                     PCSetUpOnBlocks() is done; similar to in-place
                                     factorization in the case of LU and ILU */
} PC_ASM;

#undef __FUNCT__
#define __FUNCT__ "PCView_ASM"
static int PCView_ASM(PC pc,PetscViewer viewer)
{
  PC_ASM      *jac = (PC_ASM*)pc->data;
  int         rank,ierr,i;
  const char  *cstring = 0;
  PetscTruth  iascii,isstring;
  PetscViewer sviewer;

  PetscFunctionBegin;
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_STRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: total subdomain blocks = %d, amount of overlap = %d\n",jac->n,jac->overlap);CHKERRQ(ierr);
    if (jac->type == PC_ASM_NONE)             cstring = "limited restriction and interpolation (PC_ASM_NONE)";
    else if (jac->type == PC_ASM_RESTRICT)    cstring = "full restriction (PC_ASM_RESTRICT)";
    else if (jac->type == PC_ASM_INTERPOLATE) cstring = "full interpolation (PC_ASM_INTERPOLATE)";
    else if (jac->type == PC_ASM_BASIC)       cstring = "full restriction and interpolation (PC_ASM_BASIC)";
    else                                      cstring = "Unknown ASM type";
    ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: type - %s\n",cstring);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(pc->comm,&rank);CHKERRQ(ierr);
    if (jac->same_local_solves) {
      ierr = PetscViewerASCIIPrintf(viewer,"  Local solve is same for all blocks, in the following KSP and PC objects:\n");CHKERRQ(ierr);
      ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
      if (!rank && jac->ksp) {
        ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
        ierr = KSPView(jac->ksp[0],sviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      }
      ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
    } else {
      ierr = PetscViewerASCIIPrintf(viewer,"  Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"Proc %d: number of local blocks = %d\n",rank,jac->n_local);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
      for (i=0; i<jac->n_local; i++) {
        ierr = PetscViewerASCIISynchronizedPrintf(viewer,"Proc %d: local block number %d\n",rank,i);CHKERRQ(ierr);
        ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
        ierr = KSPView(jac->ksp[i],sviewer);CHKERRQ(ierr);
        ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
        if (i != jac->n_local-1) {
          ierr = PetscViewerASCIISynchronizedPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
        }
      }
      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    ierr = PetscViewerStringSPrintf(viewer," blks=%d, overlap=%d, type=%d",jac->n,jac->overlap,jac->type);CHKERRQ(ierr);
    ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
    if (jac->ksp) {ierr = KSPView(jac->ksp[0],sviewer);CHKERRQ(ierr);}
    ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
  } else {
    SETERRQ1(1,"Viewer type %s not supported for PCASM",((PetscObject)viewer)->type_name);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetUp_ASM"
static int PCSetUp_ASM(PC pc)
{
  PC_ASM   *osm  = (PC_ASM*)pc->data;
  int      i,ierr,m,n_local = osm->n_local,n_local_true = osm->n_local_true;
  int      start,start_val,end_val,size,sz,bs;
  MatReuse scall = MAT_REUSE_MATRIX;
  IS       isl;
  KSP      ksp;
  PC       subpc;
  char     *prefix,*pprefix;
  Vec      vec;

  PetscFunctionBegin;
  ierr = MatGetVecs(pc->pmat,&vec,0);CHKERRQ(ierr);
  if (!pc->setupcalled) {
    if (osm->n == PETSC_DECIDE && osm->n_local_true == PETSC_DECIDE) {
      /* no subdomains given, use one per processor */
      osm->n_local_true = osm->n_local = 1;
      ierr = MPI_Comm_size(pc->comm,&size);CHKERRQ(ierr);
      osm->n = size;
    } else if (osm->n == PETSC_DECIDE) { /* determine global number of subdomains */
      int inwork[2],outwork[2];
      inwork[0] = inwork[1] = osm->n_local_true;
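      /* PetscMaxSum_Op reduces both entries in one call: a max over the first
         entry (giving n_local) and a sum over the second (giving n) */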
      ierr = MPI_Allreduce(inwork,outwork,1,MPI_2INT,PetscMaxSum_Op,pc->comm);CHKERRQ(ierr);
      osm->n_local = outwork[0];
      osm->n       = outwork[1];
    }
    n_local      = osm->n_local;
    n_local_true = osm->n_local_true;
    if (!osm->is){ /* build the index sets */
      ierr  = PetscMalloc((n_local_true+1)*sizeof(IS),&osm->is);CHKERRQ(ierr);
      ierr  = MatGetOwnershipRange(pc->pmat,&start_val,&end_val);CHKERRQ(ierr);
      ierr  = MatGetBlockSize(pc->pmat,&bs);CHKERRQ(ierr);
      sz    = end_val - start_val;
      start = start_val;
      if (end_val/bs*bs != end_val || start_val/bs*bs != start_val) {
        SETERRQ(PETSC_ERR_ARG_WRONG,"Bad distribution for matrix block size");
      }
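      /* split this process's bs-sized blocks of rows as evenly as possible among the
         local subdomains; the first (sz/bs) % n_local_true subdomains get one extra block */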
      for (i=0; i<n_local_true; i++){
        size       =  ((sz/bs)/n_local_true + (((sz/bs) % n_local_true) > i))*bs;
        ierr       =  ISCreateStride(PETSC_COMM_SELF,size,start,1,&isl);CHKERRQ(ierr);
        start      += size;
        osm->is[i] =  isl;
      }
      osm->is_flg = PETSC_TRUE;
    }

    ierr   = PetscMalloc((n_local_true+1)*sizeof(KSP),&osm->ksp);CHKERRQ(ierr);
    ierr   = PetscMalloc(n_local*sizeof(VecScatter),&osm->scat);CHKERRQ(ierr);
    ierr   = PetscMalloc(2*n_local*sizeof(Vec),&osm->x);CHKERRQ(ierr);
    osm->y = osm->x + n_local;

    /*  Extend the "overlapping" regions by a number of steps  */
    ierr = MatIncreaseOverlap(pc->pmat,n_local_true,osm->is,osm->overlap);CHKERRQ(ierr);
    for (i=0; i<n_local_true; i++) {
      ierr = ISSort(osm->is[i]);CHKERRQ(ierr);
    }

    /* create the local work vectors and scatter contexts */
    for (i=0; i<n_local_true; i++) {
      ierr = ISGetLocalSize(osm->is[i],&m);CHKERRQ(ierr);
      ierr = VecCreateSeq(PETSC_COMM_SELF,m,&osm->x[i]);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,osm->is[i],osm->x[i],isl,&osm->scat[i]);CHKERRQ(ierr);
      ierr = ISDestroy(isl);CHKERRQ(ierr);
    }
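    /* processes with fewer than n_local true subdomains get empty work vectors and
       scatters so that the scatter calls below still match on every process */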
    for (i=n_local_true; i<n_local; i++) {
      ierr = VecCreateSeq(PETSC_COMM_SELF,0,&osm->x[i]);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&isl);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,isl,osm->x[i],isl,&osm->scat[i]);CHKERRQ(ierr);
      ierr = ISDestroy(isl);CHKERRQ(ierr);
    }

    /*
       Create the local solvers.
    */
    for (i=0; i<n_local_true; i++) {
      ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
      PetscLogObjectParent(pc,ksp);
      ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
      ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
      ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
      ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
      ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
      osm->ksp[i] = ksp;
    }
    scall = MAT_INITIAL_MATRIX;
  } else {
    /*
       Destroy the blocks from the previous iteration
    */
    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
      scall = MAT_INITIAL_MATRIX;
    }
  }

  /* extract out the submatrices */
  ierr = MatGetSubMatrices(pc->pmat,osm->n_local_true,osm->is,osm->is,scall,&osm->pmat);CHKERRQ(ierr);

  /* Return control to the user so that the submatrices can be modified (e.g., to apply
     different boundary conditions for the submatrices than for the global problem) */
  ierr = PCModifySubMatrices(pc,osm->n_local_true,osm->is,osm->is,osm->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);

  /* loop over subdomains putting them into local ksp */
  ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
  for (i=0; i<n_local_true; i++) {
    ierr = PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);CHKERRQ(ierr);
    PetscLogObjectParent(pc,osm->pmat[i]);
    ierr = KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i],pc->flag);CHKERRQ(ierr);
    ierr = KSPSetFromOptions(osm->ksp[i]);CHKERRQ(ierr);
  }
  ierr = VecDestroy(vec);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetUpOnBlocks_ASM"
static int PCSetUpOnBlocks_ASM(PC pc)
{
  PC_ASM *osm = (PC_ASM*)pc->data;
  int    i,ierr;

  PetscFunctionBegin;
  for (i=0; i<osm->n_local_true; i++) {
    ierr = KSPSetUp(osm->ksp[i]);CHKERRQ(ierr);
  }
  /*
     If the inplace flag is set, destroy the submatrices once the setup
     on the blocks is done.
  */
  if (osm->inplace && osm->n_local_true > 0) {
    ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApply_ASM"
static int PCApply_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM      *osm = (PC_ASM*)pc->data;
  int         i,n_local = osm->n_local,n_local_true = osm->n_local_true,ierr;
  PetscScalar zero = 0.0;
  ScatterMode forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).
  */
  if (!(osm->type & PC_ASM_RESTRICT)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local; i++) {
      ierr = VecSet(&zero,osm->x[i]);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    reverse = SCATTER_REVERSE_LOCAL;
  }

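  /* start all the gathers first so that communication can overlap with the local solves below */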
  for (i=0; i<n_local; i++) {
    ierr = VecScatterBegin(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
  }
  ierr = VecSet(&zero,y);CHKERRQ(ierr);
  /* do the local solves */
  for (i=0; i<n_local_true; i++) {
    ierr = VecScatterEnd(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
    ierr = KSPSolve(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  /* handle the rest of the scatters that do not have local solves */
  for (i=n_local_true; i<n_local; i++) {
    ierr = VecScatterEnd(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  for (i=0; i<n_local; i++) {
    ierr = VecScatterEnd(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApplyTranspose_ASM"
static int PCApplyTranspose_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM      *osm = (PC_ASM*)pc->data;
  int         i,n_local = osm->n_local,n_local_true = osm->n_local_true,ierr;
  PetscScalar zero = 0.0;
  ScatterMode forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).

     Note: these are reversed from PCApply_ASM() because we are applying the
     transpose of the three terms.
  */
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local; i++) {
      ierr = VecSet(&zero,osm->x[i]);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_RESTRICT)) {
    reverse = SCATTER_REVERSE_LOCAL;
  }

  for (i=0; i<n_local; i++) {
    ierr = VecScatterBegin(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
  }
  ierr = VecSet(&zero,y);CHKERRQ(ierr);
  /* do the local solves */
  for (i=0; i<n_local_true; i++) {
    ierr = VecScatterEnd(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
    ierr = KSPSolveTranspose(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  /* handle the rest of the scatters that do not have local solves */
  for (i=n_local_true; i<n_local; i++) {
    ierr = VecScatterEnd(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  for (i=0; i<n_local; i++) {
    ierr = VecScatterEnd(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCDestroy_ASM"
static int PCDestroy_ASM(PC pc)
{
  PC_ASM *osm = (PC_ASM*)pc->data;
  int    i,ierr;

  PetscFunctionBegin;
  for (i=0; i<osm->n_local; i++) {
    ierr = VecScatterDestroy(osm->scat[i]);CHKERRQ(ierr);
    ierr = VecDestroy(osm->x[i]);CHKERRQ(ierr);
    ierr = VecDestroy(osm->y[i]);CHKERRQ(ierr);
  }
  if (osm->n_local_true > 0 && !osm->inplace && osm->pmat) {
    ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
  }
  if (osm->ksp) {
    for (i=0; i<osm->n_local_true; i++) {
      ierr = KSPDestroy(osm->ksp[i]);CHKERRQ(ierr);
    }
  }
  if (osm->is_flg) {
    for (i=0; i<osm->n_local_true; i++) {ierr = ISDestroy(osm->is[i]);CHKERRQ(ierr);}
    ierr = PetscFree(osm->is);CHKERRQ(ierr);
  }
  if (osm->ksp) {ierr = PetscFree(osm->ksp);CHKERRQ(ierr);}
  if (osm->scat) {ierr = PetscFree(osm->scat);CHKERRQ(ierr);}
  if (osm->x) {ierr = PetscFree(osm->x);CHKERRQ(ierr);}
  ierr = PetscFree(osm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetFromOptions_ASM"
static int PCSetFromOptions_ASM(PC pc)
{
  PC_ASM     *osm = (PC_ASM*)pc->data;
  int        blocks,ovl,ierr,indx;
  PetscTruth flg,set,sym;
  const char *type[] = {"none","restrict","interpolate","basic"};

  PetscFunctionBegin;
  /* use the basic (symmetric) ASM type if the matrix is known to be symmetric */
  if (pc->pmat && !osm->type_set) {
    ierr = MatIsSymmetricKnown(pc->pmat,&set,&sym);CHKERRQ(ierr);
    if (set && sym) {
      osm->type = PC_ASM_BASIC;
    }
  }
  ierr = PetscOptionsHead("Additive Schwarz options");CHKERRQ(ierr);
    ierr = PetscOptionsInt("-pc_asm_blocks","Number of subdomains","PCASMSetTotalSubdomains",osm->n,&blocks,&flg);CHKERRQ(ierr);
    if (flg) {ierr = PCASMSetTotalSubdomains(pc,blocks,PETSC_NULL);CHKERRQ(ierr);}
    ierr = PetscOptionsInt("-pc_asm_overlap","Number of grid points overlap","PCASMSetOverlap",osm->overlap,&ovl,&flg);CHKERRQ(ierr);
    if (flg) {ierr = PCASMSetOverlap(pc,ovl);CHKERRQ(ierr);}
    ierr = PetscOptionsName("-pc_asm_in_place","Perform matrix factorization in-place","PCASMSetUseInPlace",&flg);CHKERRQ(ierr);
    if (flg) {ierr = PCASMSetUseInPlace(pc);CHKERRQ(ierr);}
    ierr = PetscOptionsEList("-pc_asm_type","Type of restriction/extension","PCASMSetType",type,4,type[1],&indx,&flg);CHKERRQ(ierr);
    if (flg) {
      ierr = PCASMSetType(pc,(PCASMType)indx);CHKERRQ(ierr);
    }
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*------------------------------------------------------------------------------------*/

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCASMSetLocalSubdomains_ASM"
int PCASMSetLocalSubdomains_ASM(PC pc,int n,IS is[])
{
  PC_ASM *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  if (n < 0) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 0 or more blocks");

  if (pc->setupcalled && n != osm->n_local_true) {
    SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"PCASMSetLocalSubdomains() should be called before calling PCSetUp().");
  }
  if (!pc->setupcalled){
    osm->n_local_true = n;
    osm->is           = is;
  }
  PetscFunctionReturn(0);
}
EXTERN_C_END

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCASMSetTotalSubdomains_ASM"
int PCASMSetTotalSubdomains_ASM(PC pc,int N,IS *is)
{
  PC_ASM *osm = (PC_ASM*)pc->data;
  int    rank,size,ierr,n;

  PetscFunctionBegin;

  if (is) SETERRQ(PETSC_ERR_SUP,"Use PCASMSetLocalSubdomains() to set specific index sets\n\
they cannot be set globally yet.");

  /*
     Split the subdomains equally among all processors
  */
  ierr = MPI_Comm_rank(pc->comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(pc->comm,&size);CHKERRQ(ierr);
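  /* the first N % size processes each get one extra subdomain */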
  n = N/size + ((N % size) > rank);
  if (pc->setupcalled && n != osm->n_local_true) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"PCASMSetTotalSubdomains() should be called before PCSetUp().");
  if (!pc->setupcalled){
    osm->n_local_true = n;
    osm->is           = 0;
  }
  PetscFunctionReturn(0);
}
EXTERN_C_END

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCASMSetOverlap_ASM"
int PCASMSetOverlap_ASM(PC pc,int ovl)
{
  PC_ASM *osm;

  PetscFunctionBegin;
  if (ovl < 0) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");

  osm          = (PC_ASM*)pc->data;
  osm->overlap = ovl;
  PetscFunctionReturn(0);
}
EXTERN_C_END

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCASMSetType_ASM"
int PCASMSetType_ASM(PC pc,PCASMType type)
{
  PC_ASM *osm;

  PetscFunctionBegin;
  osm           = (PC_ASM*)pc->data;
  osm->type     = type;
  osm->type_set = PETSC_TRUE;
  PetscFunctionReturn(0);
}
EXTERN_C_END

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCASMGetSubKSP_ASM"
int PCASMGetSubKSP_ASM(PC pc,int *n_local,int *first_local,KSP **ksp)
{
  PC_ASM   *jac = (PC_ASM*)pc->data;
  int      ierr;

  PetscFunctionBegin;
  if (jac->n_local_true < 0) {
    SETERRQ(1,"Need to call PCSetUp() on PC (or KSPSetUp() on the outer KSP object) before calling here");
  }

  if (n_local)     *n_local     = jac->n_local_true;
  if (first_local) {
    ierr = MPI_Scan(&jac->n_local_true,first_local,1,MPI_INT,MPI_SUM,pc->comm);CHKERRQ(ierr);
    *first_local -= jac->n_local_true;
  }
  *ksp                   = jac->ksp;
  jac->same_local_solves = PETSC_FALSE; /* assume that the local solves are now different;
                                           not necessarily true, though!  This flag is
                                           used only by PCView_ASM() */
  PetscFunctionReturn(0);
}
EXTERN_C_END

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCASMSetUseInPlace_ASM"
int PCASMSetUseInPlace_ASM(PC pc)
{
  PC_ASM *dir;

  PetscFunctionBegin;
  dir          = (PC_ASM*)pc->data;
  dir->inplace = PETSC_TRUE;
  PetscFunctionReturn(0);
}
EXTERN_C_END

/*----------------------------------------------------------------------------*/
#undef __FUNCT__
#define __FUNCT__ "PCASMSetUseInPlace"
/*@
   PCASMSetUseInPlace - Tells the system to destroy the matrix after setup is done.

   Collective on PC

   Input Parameter:
.  pc - the preconditioner context

   Options Database Key:
.  -pc_asm_in_place - Activates in-place factorization

   Note:
   PCASMSetUseInPlace() can only be used with the KSP method KSPPREONLY, and
   only when the original matrix is not required during the solve process.
   It destroys the matrix early, thus saving memory.

   Level: intermediate

.keywords: PC, set, factorization, direct, inplace, in-place, ASM

.seealso: PCILUSetUseInPlace(), PCLUSetUseInPlace()
@*/
int PCASMSetUseInPlace(PC pc)
{
  int ierr,(*f)(PC);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE,1);
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetUseInPlace_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
/*----------------------------------------------------------------------------*/

#undef __FUNCT__
#define __FUNCT__ "PCASMSetLocalSubdomains"
/*@C
    PCASMSetLocalSubdomains - Sets the local subdomains (for this processor
    only) for the additive Schwarz preconditioner.

    Collective on PC

    Input Parameters:
+   pc - the preconditioner context
.   n - the number of subdomains for this processor (default value = 1)
-   is - the index sets that define the subdomains for this processor
         (or PETSC_NULL for PETSc to determine subdomains)

    Notes:
    The IS numbering is in the parallel, global numbering of the vector.

    By default the ASM preconditioner uses 1 block per processor.

    These index sets cannot be destroyed until after completion of the
    linear solves for which the ASM preconditioner is being used.

    Use PCASMSetTotalSubdomains() to set the subdomains for all processors.

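    Example (an illustrative sketch, not from this file; builds one subdomain per
    process from the local row range [rstart,rend) of an assumed parallel Mat A):
.vb
      IS  is;
      int rstart,rend;
      ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,rend-rstart,rstart,1,&is);CHKERRQ(ierr);
      ierr = PCASMSetLocalSubdomains(pc,1,&is);CHKERRQ(ierr);
.ve
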
    Level: advanced

.keywords: PC, ASM, set, local, subdomains, additive Schwarz

.seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
@*/
int PCASMSetLocalSubdomains(PC pc,int n,IS is[])
{
  int ierr,(*f)(PC,int,IS[]);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE,1);
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetLocalSubdomains_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,n,is);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMSetTotalSubdomains"
/*@C
    PCASMSetTotalSubdomains - Sets the subdomains for all processors for the
    additive Schwarz preconditioner.  Either all or no processors in the
    PC communicator must call this routine, with the same index sets.

    Collective on PC

    Input Parameters:
+   pc - the preconditioner context
.   n - the number of subdomains for all processors
-   is - the index sets that define the subdomains for all processors
         (or PETSC_NULL for PETSc to determine subdomains)

    Options Database Key:
    To set the total number of subdomain blocks rather than specify the
    index sets, use the option
.    -pc_asm_blocks <blks> - Sets total blocks

    Notes:
    Currently you cannot use this routine to set the actual subdomains via the is argument.

    By default the ASM preconditioner uses 1 block per processor.

    These index sets cannot be destroyed until after completion of the
    linear solves for which the ASM preconditioner is being used.

    Use PCASMSetLocalSubdomains() to set local subdomains.

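    Example (an illustrative sketch, not from this file; requests eight subdomains
    split across the processes and lets PETSc build the index sets):
.vb
      ierr = PCASMSetTotalSubdomains(pc,8,PETSC_NULL);CHKERRQ(ierr);
.ve
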
    Level: advanced

.keywords: PC, ASM, set, total, global, subdomains, additive Schwarz

.seealso: PCASMSetLocalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D()
@*/
int PCASMSetTotalSubdomains(PC pc,int N,IS *is)
{
  int ierr,(*f)(PC,int,IS *);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE,1);
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetTotalSubdomains_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,N,is);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMSetOverlap"
/*@
    PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
    additive Schwarz preconditioner.  Either all or no processors in the
    PC communicator must call this routine.

    Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)

    Options Database Key:
.   -pc_asm_overlap <ovl> - Sets overlap

    Notes:
    By default the ASM preconditioner uses 1 block per processor.  To use
    multiple blocks per processor, see PCASMSetTotalSubdomains() and
    PCASMSetLocalSubdomains() (and the option -pc_asm_blocks <blks>).

    The overlap defaults to 1, so if one desires that no additional
    overlap be computed beyond what may have been set with a call to
    PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(), then ovl
    must be set to 0.  In particular, if one does not explicitly set
    the subdomains in an application code, then all overlap would be computed
    internally by PETSc, and using an overlap of 0 would result in an ASM
    variant that is equivalent to the block Jacobi preconditioner.

    Note that one can define initial index sets with any overlap via
    PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(); the routine
    PCASMSetOverlap() merely allows PETSc to extend that overlap further
    if desired.

    Level: intermediate

.keywords: PC, ASM, set, overlap

.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
@*/
int PCASMSetOverlap(PC pc,int ovl)
{
  int ierr,(*f)(PC,int);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE,1);
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetOverlap_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,ovl);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMSetType"
/*@
    PCASMSetType - Sets the type of restriction and interpolation used
    for local problems in the additive Schwarz method.

    Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   type - variant of ASM, one of
.vb
      PC_ASM_BASIC       - full interpolation and restriction
      PC_ASM_RESTRICT    - full restriction, local processor interpolation
      PC_ASM_INTERPOLATE - full interpolation, local processor restriction
      PC_ASM_NONE        - local processor restriction and interpolation
.ve

    Options Database Key:
.   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type

    Level: intermediate

.keywords: PC, ASM, set, type

.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D()
@*/
int PCASMSetType(PC pc,PCASMType type)
{
  int ierr,(*f)(PC,PCASMType);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE,1);
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetType_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,type);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMGetSubKSP"
/*@C
   PCASMGetSubKSP - Gets the local KSP contexts for all blocks on
   this processor.

   Collective on PC iff first_local is requested

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  n_local - the number of blocks on this processor or PETSC_NULL
.  first_local - the global number of the first block on this processor or PETSC_NULL,
                 all processors must request or all must pass PETSC_NULL
-  ksp - the array of KSP contexts

   Notes:
   After PCASMGetSubKSP() the array of KSP contexts should not be freed.

   Currently for some matrix implementations only 1 block per processor
   is supported.

   You must call KSPSetUp() before calling PCASMGetSubKSP().

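   Example (an illustrative sketch, not from this file; assumes an outer KSP ksp
   using this pc, and sets ILU on every local block after setup):
.vb
      KSP *subksp;
      PC  subpc;
      int i,nlocal,first;
      ierr = KSPSetUp(ksp);CHKERRQ(ierr);
      ierr = PCASMGetSubKSP(pc,&nlocal,&first,&subksp);CHKERRQ(ierr);
      for (i=0; i<nlocal; i++) {
        ierr = KSPGetPC(subksp[i],&subpc);CHKERRQ(ierr);
        ierr = PCSetType(subpc,PCILU);CHKERRQ(ierr);
      }
.ve
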
   Level: advanced

.keywords: PC, ASM, additive Schwarz, get, sub, KSP, context

.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMSetOverlap(),
          PCASMCreateSubdomains2D()
@*/
int PCASMGetSubKSP(PC pc,int *n_local,int *first_local,KSP *ksp[])
{
  int ierr,(*f)(PC,int*,int*,KSP **);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE,1);
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMGetSubKSP_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,n_local,first_local,ksp);CHKERRQ(ierr);
  } else {
    SETERRQ(1,"Cannot get subksp for this type of PC");
  }
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------------------*/
/*MC
   PCASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
           its own KSP object.

   Options Database Keys:
+  -pc_asm_truelocal - Activates PCASMSetUseTrueLocal()
.  -pc_asm_in_place - Activates in-place factorization
.  -pc_asm_blocks <blks> - Sets total blocks
.  -pc_asm_overlap <ovl> - Sets overlap
-  -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type

     IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
      will get a different convergence rate due to the default option of -pc_asm_type restrict. Use
      -pc_asm_type basic to use the standard ASM.

   Notes: Each processor can have one or more blocks, but a block cannot be shared by more
     than one processor. Defaults to one block per processor.

     To set options on the solvers for each block, append -sub_ to all the KSP and PC
        options database keys. For example, -sub_pc_type ilu -sub_pc_ilu_levels 1 -sub_ksp_type preonly

     To set the options on the solvers separately for each block, call PCASMGetSubKSP()
         and set the options directly on the resulting KSP object (you can access its PC
         with KSPGetPC())

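     A minimal usage sketch (illustrative, not from this file; assumes a parallel
     Mat A and Vecs b and x already exist):
.vb
        ierr = KSPCreate(PETSC_COMM_WORLD,&ksp);CHKERRQ(ierr);
        ierr = KSPSetOperators(ksp,A,A,DIFFERENT_NONZERO_PATTERN);CHKERRQ(ierr);
        ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
        ierr = PCSetType(pc,PCASM);CHKERRQ(ierr);
        ierr = PCASMSetOverlap(pc,2);CHKERRQ(ierr);
        ierr = KSPSetFromOptions(ksp);CHKERRQ(ierr);
        ierr = KSPSolve(ksp,b,x);CHKERRQ(ierr);
.ve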

   Level: beginner

   Concepts: additive Schwarz method

.seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
           PCBJACOBI, PCASMSetUseTrueLocal(), PCASMGetSubKSP(), PCASMSetLocalSubdomains(),
           PCASMSetTotalSubdomains(), PCSetModifySubmatrices(), PCASMSetOverlap(), PCASMSetType(),
           PCASMSetUseInPlace()
M*/

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCCreate_ASM"
int PCCreate_ASM(PC pc)
{
  int    ierr;
  PC_ASM *osm;

  PetscFunctionBegin;
  ierr = PetscNew(PC_ASM,&osm);CHKERRQ(ierr);
  PetscLogObjectMemory(pc,sizeof(PC_ASM));
  ierr = PetscMemzero(osm,sizeof(PC_ASM));CHKERRQ(ierr);
  osm->n                 = PETSC_DECIDE;
  osm->n_local           = 0;
  osm->n_local_true      = PETSC_DECIDE;
  osm->overlap           = 1;
  osm->is_flg            = PETSC_FALSE;
  osm->ksp               = 0;
  osm->scat              = 0;
  osm->is                = 0;
  osm->mat               = 0;
  osm->pmat              = 0;
  osm->type              = PC_ASM_RESTRICT;
  osm->same_local_solves = PETSC_TRUE;
  osm->inplace           = PETSC_FALSE;
  pc->data               = (void*)osm;

  pc->ops->apply             = PCApply_ASM;
  pc->ops->applytranspose    = PCApplyTranspose_ASM;
  pc->ops->setup             = PCSetUp_ASM;
  pc->ops->destroy           = PCDestroy_ASM;
  pc->ops->setfromoptions    = PCSetFromOptions_ASM;
  pc->ops->setuponblocks     = PCSetUpOnBlocks_ASM;
  pc->ops->view              = PCView_ASM;
  pc->ops->applyrichardson   = 0;

  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetLocalSubdomains_C","PCASMSetLocalSubdomains_ASM",
                    PCASMSetLocalSubdomains_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetTotalSubdomains_C","PCASMSetTotalSubdomains_ASM",
                    PCASMSetTotalSubdomains_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetOverlap_C","PCASMSetOverlap_ASM",
                    PCASMSetOverlap_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetType_C","PCASMSetType_ASM",
                    PCASMSetType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMGetSubKSP_C","PCASMGetSubKSP_ASM",
                    PCASMGetSubKSP_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetUseInPlace_C","PCASMSetUseInPlace_ASM",
                    PCASMSetUseInPlace_ASM);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END
#undef __FUNCT__
#define __FUNCT__ "PCASMCreateSubdomains2D"
/*@
   PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
   preconditioner for a two-dimensional problem on a regular grid.

   Not Collective

   Input Parameters:
+  m, n - the number of mesh points in the x and y directions
.  M, N - the number of subdomains in the x and y directions
.  dof - degrees of freedom per node
-  overlap - overlap in mesh lines

   Output Parameters:
+  Nsub - the number of subdomains created
-  is - the array of index sets defining the subdomains

   Note:
   Presently PCASMCreateSubdomains2D() is valid only for sequential
   preconditioners.  More general related routines are
   PCASMSetTotalSubdomains() and PCASMSetLocalSubdomains().

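   Example (an illustrative sketch, not from this file; a 12 x 12 grid split into
   2 x 2 subdomains with one mesh line of overlap, then handed to an ASM PC):
.vb
      int Nsub;
      IS  *is;
      ierr = PCASMCreateSubdomains2D(12,12,2,2,1,1,&Nsub,&is);CHKERRQ(ierr);
      ierr = PCASMSetLocalSubdomains(pc,Nsub,is);CHKERRQ(ierr);
.ve
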
   Level: advanced

.keywords: PC, ASM, additive Schwarz, create, subdomains, 2D, regular grid

.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
          PCASMSetOverlap()
@*/
int PCASMCreateSubdomains2D(int m,int n,int M,int N,int dof,int overlap,int *Nsub,IS **is)
{
  int i,j,height,width,ystart,xstart,yleft,yright,xleft,xright,loc_outer;
  int nidx,*idx,loc,ii,jj,ierr,count;

  PetscFunctionBegin;
  if (dof != 1) SETERRQ(PETSC_ERR_SUP,"Currently only supports one degree of freedom per node");

  *Nsub = N*M;
  ierr = PetscMalloc((*Nsub)*sizeof(IS),is);CHKERRQ(ierr);
  ystart = 0;
  loc_outer = 0;
  for (i=0; i<N; i++) {
    height = n/N + ((n % N) > i); /* height of subdomain */
    if (height < 2) SETERRQ(1,"Too many N subdomains for mesh dimension n");
    yleft  = ystart - overlap; if (yleft < 0) yleft = 0;
    yright = ystart + height + overlap; if (yright > n) yright = n;
    xstart = 0;
    for (j=0; j<M; j++) {
      width = m/M + ((m % M) > j); /* width of subdomain */
      if (width < 2) SETERRQ(1,"Too many M subdomains for mesh dimension m");
      xleft  = xstart - overlap; if (xleft < 0) xleft = 0;
      xright = xstart + width + overlap; if (xright > m) xright = m;
      nidx   = (xright - xleft)*(yright - yleft);
      ierr   = PetscMalloc(nidx*sizeof(int),&idx);CHKERRQ(ierr);
      loc    = 0;
      for (ii=yleft; ii<yright; ii++) {
        count = m*ii + xleft;
        for (jj=xleft; jj<xright; jj++) {
          idx[loc++] = count++;
        }
      }
      ierr = ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,(*is)+loc_outer++);CHKERRQ(ierr);
      ierr = PetscFree(idx);CHKERRQ(ierr);
      xstart += width;
    }
    ystart += height;
  }
  for (i=0; i<*Nsub; i++) { ierr = ISSort((*is)[i]);CHKERRQ(ierr); }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMGetLocalSubdomains"
/*@C
    PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
    only) for the additive Schwarz preconditioner.

    Collective on PC

    Input Parameter:
.   pc - the preconditioner context

    Output Parameters:
+   n - the number of subdomains for this processor (default value = 1)
-   is - the index sets that define the subdomains for this processor

    Notes:
    The IS numbering is in the parallel, global numbering of the vector.

    Level: advanced

.keywords: PC, ASM, set, local, subdomains, additive Schwarz

.seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubmatrices()
@*/
int PCASMGetLocalSubdomains(PC pc,int *n,IS *is[])
{
  PC_ASM *osm;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE,1);
  PetscValidIntPointer(n,2);
  if (!pc->setupcalled) {
    SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUp() or PCSetUp().");
  }

  osm = (PC_ASM*)pc->data;
  if (n)  *n = osm->n_local_true;
  if (is) *is = osm->is;
  PetscFunctionReturn(0);
}

994 #undef __FUNCT__
995 #define __FUNCT__ "PCASMGetLocalSubmatrices"
996 /*@C
997     PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
998     only) for the additive Schwarz preconditioner.
999 
1000     Collective on PC
1001 
1002     Input Parameter:
1003 .   pc - the preconditioner context
1004 
1005     Output Parameters:
1006 +   n - the number of matrices for this processor (default value = 1)
1007 -   mat - the matrices
1008 
1009 
1010     Level: advanced
1011 
1012 .keywords: PC, ASM, set, local, subdomains, additive Schwarz, block Jacobi
1013 
1014 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1015           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1016 @*/
1017 int PCASMGetLocalSubmatrices(PC pc,int *n,Mat *mat[])
1018 {
1019   PC_ASM *osm;
1020 
1021   PetscFunctionBegin;
1022   PetscValidHeaderSpecific(pc,PC_COOKIE,1);
1023   PetscValidPointer(n,2);
1024   if (!pc->setupcalled) {
1025     SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUP() or PCSetUp().");
1026   }
1027 
1028   osm = (PC_ASM*)pc->data;
1029   if (n)   *n   = osm->n_local_true;
1030   if (mat) *mat = osm->pmat;
1031   PetscFunctionReturn(0);
1032 }
1033 
1034