/*$Id: asm.c,v 1.131 2001/08/07 03:03:38 balay Exp $*/
/*
  This file defines an additive Schwarz preconditioner for any Mat implementation.

  Note that each processor may have any number of subdomains. But in order to
  deal easily with the VecScatter(), we treat each processor as if it has the
  same number of subdomains.

       n - total number of true subdomains on all processors
       n_local_true - actual number of subdomains on this processor
       n_local - maximum over all processors of n_local_true

  For example, if three processors own 2, 1, and 3 subdomains respectively,
  then n = 6, n_local_true is 2, 1, and 3 on the respective processors, and
  n_local = 3 on all of them.
*/
#include "src/ksp/pc/pcimpl.h"     /*I "petscpc.h" I*/

typedef struct {
  int        n,n_local,n_local_true;
  PetscTruth is_flg;              /* flag set to PETSC_TRUE if the IS were created in PCSetUp_ASM() */
  int        overlap;             /* overlap requested by user */
  KSP        *ksp;                /* linear solvers for each block */
  VecScatter *scat;               /* mapping to subregion */
  Vec        *x,*y;
  IS         *is;                 /* index set that defines each subdomain */
  Mat        *mat,*pmat;          /* mat is not currently used */
  PCASMType  type;                /* use reduced interpolation, restriction or both */
  PetscTruth same_local_solves;   /* flag indicating whether all local solvers are the same */
  PetscTruth inplace;             /* indicates that the submatrices are destroyed after
                                     PCSetUpOnBlocks() is done; similar to in-place
                                     factorization in the case of LU and ILU */
} PC_ASM;

#undef __FUNCT__
#define __FUNCT__ "PCView_ASM"
static int PCView_ASM(PC pc,PetscViewer viewer)
{
  PC_ASM      *jac = (PC_ASM*)pc->data;
  int         rank,ierr,i;
  char        *cstring = 0;
  PetscTruth  isascii,isstring;
  PetscViewer sviewer;

  PetscFunctionBegin;
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&isascii);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_STRING,&isstring);CHKERRQ(ierr);
  if (isascii) {
    ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: total subdomain blocks = %d, amount of overlap = %d\n",jac->n,jac->overlap);CHKERRQ(ierr);
    if (jac->type == PC_ASM_NONE)             cstring = "limited restriction and interpolation (PC_ASM_NONE)";
    else if (jac->type == PC_ASM_RESTRICT)    cstring = "full restriction (PC_ASM_RESTRICT)";
    else if (jac->type == PC_ASM_INTERPOLATE) cstring = "full interpolation (PC_ASM_INTERPOLATE)";
    else if (jac->type == PC_ASM_BASIC)       cstring = "full restriction and interpolation (PC_ASM_BASIC)";
    else                                      cstring = "Unknown ASM type";
    ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: type - %s\n",cstring);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(pc->comm,&rank);CHKERRQ(ierr);
    if (jac->same_local_solves) {
      ierr = PetscViewerASCIIPrintf(viewer,"  Local solve is same for all blocks, in the following KSP and PC objects:\n");CHKERRQ(ierr);
      ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
      if (!rank && jac->ksp) {
        ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
        ierr = KSPView(jac->ksp[0],sviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      }
      ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
    } else {
      ierr = PetscViewerASCIIPrintf(viewer,"  Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"Proc %d: number of local blocks = %d\n",rank,jac->n_local);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
      for (i=0; i<jac->n_local; i++) {
        ierr = PetscViewerASCIISynchronizedPrintf(viewer,"Proc %d: local block number %d\n",rank,i);CHKERRQ(ierr);
        ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
        ierr = KSPView(jac->ksp[i],sviewer);CHKERRQ(ierr);
        ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
        if (i != jac->n_local-1) {
          ierr = PetscViewerASCIISynchronizedPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
        }
      }
      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    ierr = PetscViewerStringSPrintf(viewer," blks=%d, overlap=%d, type=%d",jac->n,jac->overlap,jac->type);CHKERRQ(ierr);
    ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
    if (jac->ksp) {ierr = KSPView(jac->ksp[0],sviewer);CHKERRQ(ierr);}
    ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
  } else {
    SETERRQ1(1,"Viewer type %s not supported for PCASM",((PetscObject)viewer)->type_name);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetUp_ASM"
static int PCSetUp_ASM(PC pc)
{
  PC_ASM   *osm  = (PC_ASM*)pc->data;
  int      i,ierr,m,n_local = osm->n_local,n_local_true = osm->n_local_true;
  int      start,start_val,end_val,size,sz,bs;
  MatReuse scall = MAT_REUSE_MATRIX;
  IS       isl;
  KSP      ksp;
  PC       subpc;
  char     *prefix,*pprefix;

  PetscFunctionBegin;
  if (!pc->setupcalled) {
    if (osm->n == PETSC_DECIDE && osm->n_local_true == PETSC_DECIDE) {
      /* no subdomains given, use one per processor */
      osm->n_local_true = osm->n_local = 1;
      ierr = MPI_Comm_size(pc->comm,&size);CHKERRQ(ierr);
      osm->n = size;
    } else if (osm->n == PETSC_DECIDE) { /* determine global number of subdomains */
      int inwork[2],outwork[2];
      inwork[0] = inwork[1] = osm->n_local_true;
      ierr = MPI_Allreduce(inwork,outwork,1,MPI_2INT,PetscMaxSum_Op,pc->comm);CHKERRQ(ierr);
      osm->n_local = outwork[0];
      osm->n       = outwork[1];
    }
    n_local      = osm->n_local;
    n_local_true = osm->n_local_true;
    if (!osm->is){ /* build the index sets */
      ierr  = PetscMalloc((n_local_true+1)*sizeof(IS),&osm->is);CHKERRQ(ierr);
      ierr  = MatGetOwnershipRange(pc->pmat,&start_val,&end_val);CHKERRQ(ierr);
      ierr  = MatGetBlockSize(pc->pmat,&bs);CHKERRQ(ierr);
      sz    = end_val - start_val;
      start = start_val;
      if (end_val/bs*bs != end_val || start_val/bs*bs != start_val) {
        SETERRQ(PETSC_ERR_ARG_WRONG,"Bad distribution for matrix block size");
      }
      for (i=0; i<n_local_true; i++){
        size       =  ((sz/bs)/n_local_true + (((sz/bs) % n_local_true) > i))*bs;
        ierr       =  ISCreateStride(PETSC_COMM_SELF,size,start,1,&isl);CHKERRQ(ierr);
        start      += size;
        osm->is[i] =  isl;
      }
      osm->is_flg = PETSC_TRUE;
    }

    ierr   = PetscMalloc((n_local_true+1)*sizeof(KSP),&osm->ksp);CHKERRQ(ierr);
    ierr   = PetscMalloc(n_local*sizeof(VecScatter),&osm->scat);CHKERRQ(ierr);
    ierr   = PetscMalloc(2*n_local*sizeof(Vec),&osm->x);CHKERRQ(ierr);
    osm->y = osm->x + n_local;

    /*  Extend the "overlapping" regions by a number of steps  */
    ierr = MatIncreaseOverlap(pc->pmat,n_local_true,osm->is,osm->overlap);CHKERRQ(ierr);
    for (i=0; i<n_local_true; i++) {
      ierr = ISSort(osm->is[i]);CHKERRQ(ierr);
    }

    /* create the local work vectors and scatter contexts */
    for (i=0; i<n_local_true; i++) {
      ierr = ISGetLocalSize(osm->is[i],&m);CHKERRQ(ierr);
      ierr = VecCreateSeq(PETSC_COMM_SELF,m,&osm->x[i]);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);CHKERRQ(ierr);
      ierr = VecScatterCreate(pc->vec,osm->is[i],osm->x[i],isl,&osm->scat[i]);CHKERRQ(ierr);
      ierr = ISDestroy(isl);CHKERRQ(ierr);
    }
    for (i=n_local_true; i<n_local; i++) {
      ierr = VecCreateSeq(PETSC_COMM_SELF,0,&osm->x[i]);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&isl);CHKERRQ(ierr);
      ierr = VecScatterCreate(pc->vec,isl,osm->x[i],isl,&osm->scat[i]);CHKERRQ(ierr);
      ierr = ISDestroy(isl);CHKERRQ(ierr);
    }

    /*
       Create the local solvers.
    */
    for (i=0; i<n_local_true; i++) {
      ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
      PetscLogObjectParent(pc,ksp);
      ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
      ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
      ierr = PCSetType(subpc,PCILU);CHKERRQ(ierr);
      ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
      ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
      ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
      ierr = KSPSetFromOptions(ksp);CHKERRQ(ierr);
      osm->ksp[i] = ksp;
    }
    scall = MAT_INITIAL_MATRIX;
  } else {
    /*
       Destroy the blocks from the previous iteration
    */
    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
      scall = MAT_INITIAL_MATRIX;
    }
  }

  /* extract out the submatrices */
  ierr = MatGetSubMatrices(pc->pmat,osm->n_local_true,osm->is,osm->is,scall,&osm->pmat);CHKERRQ(ierr);

  /* Return control to the user so that the submatrices can be modified (e.g., to apply
     different boundary conditions for the submatrices than for the global problem) */
  ierr = PCModifySubMatrices(pc,osm->n_local,osm->is,osm->is,osm->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);

  /* loop over subdomains putting them into local ksp */
  ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
  for (i=0; i<n_local_true; i++) {
    ierr = PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);CHKERRQ(ierr);
    PetscLogObjectParent(pc,osm->pmat[i]);
    ierr = KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i],pc->flag);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetUpOnBlocks_ASM"
static int PCSetUpOnBlocks_ASM(PC pc)
{
  PC_ASM *osm = (PC_ASM*)pc->data;
  int    i,ierr;

  PetscFunctionBegin;
  for (i=0; i<osm->n_local_true; i++) {
    ierr = KSPSetRhs(osm->ksp[i],osm->x[i]);CHKERRQ(ierr);
    ierr = KSPSetSolution(osm->ksp[i],osm->y[i]);CHKERRQ(ierr);
    ierr = KSPSetUp(osm->ksp[i]);CHKERRQ(ierr);
  }
  /*
     If the inplace flag is set, destroy the submatrices after the setup
     on blocks is done.
  */
  if (osm->inplace && osm->n_local_true > 0) {
    ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApply_ASM"
static int PCApply_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM      *osm = (PC_ASM*)pc->data;
  int         i,n_local = osm->n_local,n_local_true = osm->n_local_true,ierr;
  PetscScalar zero = 0.0;
  ScatterMode forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).
  */
  if (!(osm->type & PC_ASM_RESTRICT)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local; i++) {
      ierr = VecSet(&zero,osm->x[i]);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    reverse = SCATTER_REVERSE_LOCAL;
  }

  for (i=0; i<n_local; i++) {
    ierr = VecScatterBegin(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
  }
  ierr = VecSet(&zero,y);CHKERRQ(ierr);
  /* do the local solves */
  for (i=0; i<n_local_true; i++) {
    ierr = VecScatterEnd(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
    ierr = KSPSetRhs(osm->ksp[i],osm->x[i]);CHKERRQ(ierr);
    ierr = KSPSetSolution(osm->ksp[i],osm->y[i]);CHKERRQ(ierr);
    ierr = KSPSolve(osm->ksp[i]);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  /* handle the rest of the scatters that do not have local solves */
  for (i=n_local_true; i<n_local; i++) {
    ierr = VecScatterEnd(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  for (i=0; i<n_local; i++) {
    ierr = VecScatterEnd(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
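
/*
   For reference, PCApply_ASM() above implements the additive Schwarz
   application (a summary of the code, not part of the original source):

        y = sum_i R_i^T A_i^{-1} R_i x

   where R_i restricts the global vector onto (overlapping) subdomain i
   (the forward scatter), A_i^{-1} is the local subdomain solve (the KSP
   solve on block i), and R_i^T adds the local result back into the global
   vector (the reverse scatter).  The forward scatter moves off-processor
   values only when the PC_ASM_RESTRICT bit is set in osm->type, and the
   reverse scatter only when the PC_ASM_INTERPOLATE bit is set; otherwise
   the corresponding scatter is purely local.
*/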

#undef __FUNCT__
#define __FUNCT__ "PCApplyTranspose_ASM"
static int PCApplyTranspose_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM      *osm = (PC_ASM*)pc->data;
  int         i,n_local = osm->n_local,n_local_true = osm->n_local_true,ierr;
  PetscScalar zero = 0.0;
  ScatterMode forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).

     Note: these are reversed from PCApply_ASM() because we are applying
     the transpose of the three-term product.
  */
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local; i++) {
      ierr = VecSet(&zero,osm->x[i]);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_RESTRICT)) {
    reverse = SCATTER_REVERSE_LOCAL;
  }

  for (i=0; i<n_local; i++) {
    ierr = VecScatterBegin(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
  }
  ierr = VecSet(&zero,y);CHKERRQ(ierr);
  /* do the local solves */
  for (i=0; i<n_local_true; i++) {
    ierr = VecScatterEnd(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
    ierr = KSPSetRhs(osm->ksp[i],osm->x[i]);CHKERRQ(ierr);
    ierr = KSPSetSolution(osm->ksp[i],osm->y[i]);CHKERRQ(ierr);
    ierr = KSPSolveTranspose(osm->ksp[i]);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  /* handle the rest of the scatters that do not have local solves */
  for (i=n_local_true; i<n_local; i++) {
    ierr = VecScatterEnd(x,osm->x[i],INSERT_VALUES,forward,osm->scat[i]);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  for (i=0; i<n_local; i++) {
    ierr = VecScatterEnd(osm->y[i],y,ADD_VALUES,reverse,osm->scat[i]);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCDestroy_ASM"
static int PCDestroy_ASM(PC pc)
{
  PC_ASM *osm = (PC_ASM*)pc->data;
  int    i,ierr;

  PetscFunctionBegin;
  for (i=0; i<osm->n_local; i++) {
    ierr = VecScatterDestroy(osm->scat[i]);CHKERRQ(ierr);
    ierr = VecDestroy(osm->x[i]);CHKERRQ(ierr);
    ierr = VecDestroy(osm->y[i]);CHKERRQ(ierr);
  }
  if (osm->n_local_true > 0 && !osm->inplace && osm->pmat) {
    ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
  }
  if (osm->ksp) {
    for (i=0; i<osm->n_local_true; i++) {
      ierr = KSPDestroy(osm->ksp[i]);CHKERRQ(ierr);
    }
  }
  if (osm->is_flg) {
    for (i=0; i<osm->n_local_true; i++) {ierr = ISDestroy(osm->is[i]);CHKERRQ(ierr);}
    ierr = PetscFree(osm->is);CHKERRQ(ierr);
  }
  if (osm->ksp) {ierr = PetscFree(osm->ksp);CHKERRQ(ierr);}
  if (osm->scat) {ierr = PetscFree(osm->scat);CHKERRQ(ierr);}
  if (osm->x) {ierr = PetscFree(osm->x);CHKERRQ(ierr);}
  ierr = PetscFree(osm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetFromOptions_ASM"
static int PCSetFromOptions_ASM(PC pc)
{
  PC_ASM     *osm = (PC_ASM*)pc->data;
  int        blocks,ovl,ierr,indx;
  PetscTruth flg;
  char       *type[] = {"basic","restrict","interpolate","none"};

  PetscFunctionBegin;
  ierr = PetscOptionsHead("Additive Schwarz options");CHKERRQ(ierr);
    ierr = PetscOptionsInt("-pc_asm_blocks","Number of subdomains","PCASMSetTotalSubdomains",osm->n,&blocks,&flg);CHKERRQ(ierr);
    if (flg) {ierr = PCASMSetTotalSubdomains(pc,blocks,PETSC_NULL);CHKERRQ(ierr); }
    ierr = PetscOptionsInt("-pc_asm_overlap","Number of grid points overlap","PCASMSetOverlap",osm->overlap,&ovl,&flg);CHKERRQ(ierr);
    if (flg) {ierr = PCASMSetOverlap(pc,ovl);CHKERRQ(ierr); }
    ierr = PetscOptionsName("-pc_asm_in_place","Perform matrix factorization inplace","PCASMSetUseInPlace",&flg);CHKERRQ(ierr);
    if (flg) {ierr = PCASMSetUseInPlace(pc);CHKERRQ(ierr); }
    ierr = PetscOptionsEList("-pc_asm_type","Type of restriction/extension","PCASMSetType",type,4,type[1],&indx,&flg);CHKERRQ(ierr);
    if (flg) {
      ierr = PCASMSetType(pc,(PCASMType)indx);CHKERRQ(ierr);
    }
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*------------------------------------------------------------------------------------*/

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCASMSetLocalSubdomains_ASM"
int PCASMSetLocalSubdomains_ASM(PC pc,int n,IS is[])
{
  PC_ASM *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  if (n < 0) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 0 or more blocks");

  if (pc->setupcalled && n != osm->n_local_true) {
    SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"PCASMSetLocalSubdomains() should be called before calling PCSetUp().");
  }
  if (!pc->setupcalled){
    osm->n_local_true = n;
    osm->is           = is;
  }
  PetscFunctionReturn(0);
}
EXTERN_C_END

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCASMSetTotalSubdomains_ASM"
int PCASMSetTotalSubdomains_ASM(PC pc,int N,IS *is)
{
  PC_ASM *osm = (PC_ASM*)pc->data;
  int    rank,size,ierr,n;

  PetscFunctionBegin;
  if (is) SETERRQ(PETSC_ERR_SUP,"Use PCASMSetLocalSubdomains() to set specific index sets\n\
they cannot be set globally yet.");

  /*
     Split the subdomains equally among all processors
  */
  ierr = MPI_Comm_rank(pc->comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(pc->comm,&size);CHKERRQ(ierr);
  n = N/size + ((N % size) > rank);
  if (pc->setupcalled && n != osm->n_local_true) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"PCASMSetTotalSubdomains() should be called before PCSetUp().");
  if (!pc->setupcalled){
    osm->n_local_true = n;
    osm->is           = 0;
  }
  PetscFunctionReturn(0);
}
EXTERN_C_END

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCASMSetOverlap_ASM"
int PCASMSetOverlap_ASM(PC pc,int ovl)
{
  PC_ASM *osm;

  PetscFunctionBegin;
  if (ovl < 0) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");

  osm          = (PC_ASM*)pc->data;
  osm->overlap = ovl;
  PetscFunctionReturn(0);
}
EXTERN_C_END

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCASMSetType_ASM"
int PCASMSetType_ASM(PC pc,PCASMType type)
{
  PC_ASM *osm;

  PetscFunctionBegin;
  osm       = (PC_ASM*)pc->data;
  osm->type = type;
  PetscFunctionReturn(0);
}
EXTERN_C_END

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCASMGetSubKSP_ASM"
int PCASMGetSubKSP_ASM(PC pc,int *n_local,int *first_local,KSP **ksp)
{
  PC_ASM *jac = (PC_ASM*)pc->data;
  int    ierr;

  PetscFunctionBegin;
  if (jac->n_local_true < 0) {
    SETERRQ(1,"Need to call PCSetUp() on PC (or KSPSetUp() on the outer KSP object) before calling here");
  }

  if (n_local)     *n_local     = jac->n_local_true;
  if (first_local) {
    ierr = MPI_Scan(&jac->n_local_true,first_local,1,MPI_INT,MPI_SUM,pc->comm);CHKERRQ(ierr);
    *first_local -= jac->n_local_true;
  }
  *ksp = jac->ksp;
  jac->same_local_solves = PETSC_FALSE; /* Assume that the local solves are now different;
                                           not necessarily true, though!  This flag is
                                           used only by PCView_ASM() */
  PetscFunctionReturn(0);
}
EXTERN_C_END

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCASMSetUseInPlace_ASM"
int PCASMSetUseInPlace_ASM(PC pc)
{
  PC_ASM *dir;

  PetscFunctionBegin;
  dir          = (PC_ASM*)pc->data;
  dir->inplace = PETSC_TRUE;
  PetscFunctionReturn(0);
}
EXTERN_C_END

/*----------------------------------------------------------------------------*/
#undef __FUNCT__
#define __FUNCT__ "PCASMSetUseInPlace"
/*@
   PCASMSetUseInPlace - Tells the system to destroy the matrix after setup is done.

   Collective on PC

   Input Parameter:
.  pc - the preconditioner context

   Options Database Key:
.  -pc_asm_in_place - Activates in-place factorization

   Note:
   PCASMSetUseInPlace() can only be used with the KSP method KSPPREONLY, and
   only when the original matrix is not required during the solve process.
   It destroys the matrix early, thus saving memory.

   Level: intermediate

.keywords: PC, set, factorization, direct, inplace, in-place, ASM

.seealso: PCILUSetUseInPlace(), PCLUSetUseInPlace()
@*/
int PCASMSetUseInPlace(PC pc)
{
  int ierr,(*f)(PC);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE);
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetUseInPlace_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
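
/*
   A minimal usage sketch (hypothetical caller code; assumes "pc" was obtained
   from a KSP via KSPGetPC() and that the block solvers use KSPPREONLY with a
   factorization PC, as required by the note above):

       ierr = PCSetType(pc,PCASM);CHKERRQ(ierr);
       ierr = PCASMSetUseInPlace(pc);CHKERRQ(ierr);

   The same effect is available on the command line with
   -pc_type asm -pc_asm_in_place.
*/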
/*----------------------------------------------------------------------------*/

#undef __FUNCT__
#define __FUNCT__ "PCASMSetLocalSubdomains"
/*@C
    PCASMSetLocalSubdomains - Sets the local subdomains (for this processor
    only) for the additive Schwarz preconditioner.

    Collective on PC

    Input Parameters:
+   pc - the preconditioner context
.   n - the number of subdomains for this processor (default value = 1)
-   is - the index sets that define the subdomains for this processor
         (or PETSC_NULL for PETSc to determine subdomains)

    Notes:
    The IS numbering is in the parallel, global numbering of the vector.

    By default the ASM preconditioner uses 1 block per processor.

    These index sets cannot be destroyed until after completion of the
    linear solves for which the ASM preconditioner is being used.

    Use PCASMSetTotalSubdomains() to set the subdomains for all processors.

    Level: advanced

.keywords: PC, ASM, set, local, subdomains, additive Schwarz

.seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
@*/
int PCASMSetLocalSubdomains(PC pc,int n,IS is[])
{
  int ierr,(*f)(PC,int,IS[]);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE);
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetLocalSubdomains_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,n,is);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
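
/*
   A usage sketch (hypothetical; assumes this process owns rows [rstart,rend)
   of the matrix and wants two contiguous local blocks):

       IS  is[2];
       int half = (rend - rstart)/2;

       ierr = ISCreateStride(PETSC_COMM_SELF,half,rstart,1,&is[0]);CHKERRQ(ierr);
       ierr = ISCreateStride(PETSC_COMM_SELF,rend-rstart-half,rstart+half,1,&is[1]);CHKERRQ(ierr);
       ierr = PCASMSetLocalSubdomains(pc,2,is);CHKERRQ(ierr);

   Per the notes above, the index sets may not be destroyed until the solves
   using this preconditioner are complete.
*/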

#undef __FUNCT__
#define __FUNCT__ "PCASMSetTotalSubdomains"
/*@C
    PCASMSetTotalSubdomains - Sets the subdomains for all processors for the
    additive Schwarz preconditioner.  Either all or no processors in the
    PC communicator must call this routine, with the same index sets.

    Collective on PC

    Input Parameters:
+   pc - the preconditioner context
.   N - the number of subdomains for all processors
-   is - the index sets that define the subdomains for all processors
         (or PETSC_NULL for PETSc to determine subdomains)

    Options Database Key:
    To set the total number of subdomain blocks rather than specify the
    index sets, use the option
.    -pc_asm_blocks <blks> - Sets total blocks

    Notes:
    Currently you cannot use this routine to set the actual subdomains with
    the argument is; it must be PETSC_NULL.

    By default the ASM preconditioner uses 1 block per processor.

    These index sets cannot be destroyed until after completion of the
    linear solves for which the ASM preconditioner is being used.

    Use PCASMSetLocalSubdomains() to set local subdomains.

    Level: advanced

.keywords: PC, ASM, set, total, global, subdomains, additive Schwarz

.seealso: PCASMSetLocalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D()
@*/
int PCASMSetTotalSubdomains(PC pc,int N,IS *is)
{
  int ierr,(*f)(PC,int,IS *);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE);
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetTotalSubdomains_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,N,is);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
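
/*
   A usage sketch (hypothetical): request 16 subdomain blocks in total and let
   PETSc split them evenly among the processes and build the index sets itself:

       ierr = PCSetType(pc,PCASM);CHKERRQ(ierr);
       ierr = PCASMSetTotalSubdomains(pc,16,PETSC_NULL);CHKERRQ(ierr);

   This matches the command line option -pc_asm_blocks 16.
*/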

#undef __FUNCT__
#define __FUNCT__ "PCASMSetOverlap"
/*@
    PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
    additive Schwarz preconditioner.  Either all or no processors in the
    PC communicator must call this routine.

    Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)

    Options Database Key:
.   -pc_asm_overlap <ovl> - Sets overlap

    Notes:
    By default the ASM preconditioner uses 1 block per processor.  To use
    multiple blocks per processor, see PCASMSetTotalSubdomains() and
    PCASMSetLocalSubdomains() (and the option -pc_asm_blocks <blks>).

    The overlap defaults to 1, so if one desires that no additional
    overlap be computed beyond what may have been set with a call to
    PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(), then ovl
    must be set to 0.  In particular, if one does not explicitly set
    the subdomains in an application code, then all overlap would be computed
    internally by PETSc, and using an overlap of 0 would result in an ASM
    variant that is equivalent to the block Jacobi preconditioner.

    Note that one can define initial index sets with any overlap via
    PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(); the routine
    PCASMSetOverlap() merely allows PETSc to extend that overlap further
    if desired.

    Level: intermediate

.keywords: PC, ASM, set, overlap

.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
@*/
int PCASMSetOverlap(PC pc,int ovl)
{
  int ierr,(*f)(PC,int);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE);
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetOverlap_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,ovl);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
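
/*
   A usage sketch (hypothetical): keep the default one-block-per-process
   decomposition but extend each block by two layers of mesh points:

       ierr = PCSetType(pc,PCASM);CHKERRQ(ierr);
       ierr = PCASMSetOverlap(pc,2);CHKERRQ(ierr);

   As noted above, calling PCASMSetOverlap(pc,0) with PETSc-generated
   subdomains instead yields the block Jacobi preconditioner.
*/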

#undef __FUNCT__
#define __FUNCT__ "PCASMSetType"
/*@
    PCASMSetType - Sets the type of restriction and interpolation used
    for local problems in the additive Schwarz method.

    Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   type - variant of ASM, one of
.vb
      PC_ASM_BASIC       - full interpolation and restriction
      PC_ASM_RESTRICT    - full restriction, local processor interpolation
      PC_ASM_INTERPOLATE - full interpolation, local processor restriction
      PC_ASM_NONE        - local processor restriction and interpolation
.ve

    Options Database Key:
.   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type

    Level: intermediate

.keywords: PC, ASM, set, type

.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D()
@*/
int PCASMSetType(PC pc,PCASMType type)
{
  int ierr,(*f)(PC,PCASMType);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE);
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetType_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,type);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
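
/*
   A usage sketch (hypothetical): select the classical additive Schwarz
   variant with full restriction and interpolation:

       ierr = PCASMSetType(pc,PC_ASM_BASIC);CHKERRQ(ierr);

   This matches the command line option -pc_asm_type basic.  The default,
   PC_ASM_RESTRICT, skips the off-processor communication in the reverse
   scatter of PCApply_ASM().
*/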

#undef __FUNCT__
#define __FUNCT__ "PCASMGetSubKSP"
/*@C
   PCASMGetSubKSP - Gets the local KSP contexts for all blocks on
   this processor.

   Collective on PC iff first_local is requested

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  n_local - the number of blocks on this processor or PETSC_NULL
.  first_local - the global number of the first block on this processor or PETSC_NULL,
                 all processors must request or all must pass PETSC_NULL
-  ksp - the array of KSP contexts

   Notes:
   After PCASMGetSubKSP() the array of KSP contexts is not to be freed.

   Currently for some matrix implementations only 1 block per processor
   is supported.

   You must call KSPSetUp() before calling PCASMGetSubKSP().

   Level: advanced

.keywords: PC, ASM, additive Schwarz, get, sub, KSP, context

.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMSetOverlap(),
          PCASMCreateSubdomains2D()
@*/
int PCASMGetSubKSP(PC pc,int *n_local,int *first_local,KSP *ksp[])
{
  int ierr,(*f)(PC,int*,int*,KSP **);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE);
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMGetSubKSP_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,n_local,first_local,ksp);CHKERRQ(ierr);
  } else {
    SETERRQ(1,"Cannot get subksp for this type of PC");
  }
  PetscFunctionReturn(0);
}
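
/*
   A usage sketch (hypothetical; run after KSPSetUp() as required above):
   fetch the block solvers and switch every local block to a direct LU solve:

       int nlocal,first,i;
       KSP *subksp;
       PC  subpc;

       ierr = PCASMGetSubKSP(pc,&nlocal,&first,&subksp);CHKERRQ(ierr);
       for (i=0; i<nlocal; i++) {
         ierr = KSPGetPC(subksp[i],&subpc);CHKERRQ(ierr);
         ierr = PCSetType(subpc,PCLU);CHKERRQ(ierr);
       }
*/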

/* -------------------------------------------------------------------------------------*/
/*MC
   PCASM - Use the additive Schwarz method, each block is (approximately) solved with
           its own KSP object.

   Options Database Keys:
+  -pc_asm_truelocal - Activates PCASMSetUseTrueLocal()
.  -pc_asm_in_place - Activates in-place factorization
.  -pc_asm_blocks <blks> - Sets total blocks
.  -pc_asm_overlap <ovl> - Sets overlap
-  -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type

   Notes: Each processor can have one or more blocks, but a block cannot be shared by more
     than one processor. Defaults to one block per processor.

     To set options on the solvers for each block, append -sub_ to all the KSP and PC
        options database keys. For example, -sub_pc_type ilu -sub_pc_ilu_levels 1 -sub_ksp_type preonly

     To set the options on the solvers separately for each block, call PCASMGetSubKSP()
         and set the options directly on the resulting KSP object (you can access its PC
         with KSPGetPC())

   Level: beginner

   Concepts: additive Schwarz method

.seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
           PCBJACOBI, PCASMSetUseTrueLocal(), PCASMGetSubKSP(), PCASMSetLocalSubdomains(),
           PCASMSetTotalSubdomains(), PCSetModifySubMatrices(), PCASMSetOverlap(), PCASMSetType(),
           PCASMSetUseInPlace()
M*/
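
/*
   A minimal end-to-end sketch of using PCASM from a KSP (hypothetical caller
   code; "A", "b", and "x" are an assembled parallel Mat and Vecs):

       KSP ksp;
       PC  pc;

       ierr = KSPCreate(PETSC_COMM_WORLD,&ksp);CHKERRQ(ierr);
       ierr = KSPSetOperators(ksp,A,A,DIFFERENT_NONZERO_PATTERN);CHKERRQ(ierr);
       ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
       ierr = PCSetType(pc,PCASM);CHKERRQ(ierr);
       ierr = PCASMSetOverlap(pc,1);CHKERRQ(ierr);
       ierr = KSPSetFromOptions(ksp);CHKERRQ(ierr);
       ierr = KSPSetRhs(ksp,b);CHKERRQ(ierr);
       ierr = KSPSetSolution(ksp,x);CHKERRQ(ierr);
       ierr = KSPSolve(ksp);CHKERRQ(ierr);
       ierr = KSPDestroy(ksp);CHKERRQ(ierr);
*/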

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCCreate_ASM"
int PCCreate_ASM(PC pc)
{
  int    ierr;
  PC_ASM *osm;

  PetscFunctionBegin;
  ierr = PetscNew(PC_ASM,&osm);CHKERRQ(ierr);
  PetscLogObjectMemory(pc,sizeof(PC_ASM));
  ierr = PetscMemzero(osm,sizeof(PC_ASM));CHKERRQ(ierr);
  osm->n                 = PETSC_DECIDE;
  osm->n_local           = 0;
  osm->n_local_true      = PETSC_DECIDE;
  osm->overlap           = 1;
  osm->is_flg            = PETSC_FALSE;
  osm->ksp               = 0;
  osm->scat              = 0;
  osm->is                = 0;
  osm->mat               = 0;
  osm->pmat              = 0;
  osm->type              = PC_ASM_RESTRICT;
  osm->same_local_solves = PETSC_TRUE;
  osm->inplace           = PETSC_FALSE;
  pc->data               = (void*)osm;

  pc->ops->apply             = PCApply_ASM;
  pc->ops->applytranspose    = PCApplyTranspose_ASM;
  pc->ops->setup             = PCSetUp_ASM;
  pc->ops->destroy           = PCDestroy_ASM;
  pc->ops->setfromoptions    = PCSetFromOptions_ASM;
  pc->ops->setuponblocks     = PCSetUpOnBlocks_ASM;
  pc->ops->view              = PCView_ASM;
  pc->ops->applyrichardson   = 0;

  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetLocalSubdomains_C","PCASMSetLocalSubdomains_ASM",
                    PCASMSetLocalSubdomains_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetTotalSubdomains_C","PCASMSetTotalSubdomains_ASM",
                    PCASMSetTotalSubdomains_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetOverlap_C","PCASMSetOverlap_ASM",
                    PCASMSetOverlap_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetType_C","PCASMSetType_ASM",
                    PCASMSetType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMGetSubKSP_C","PCASMGetSubKSP_ASM",
                    PCASMGetSubKSP_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetUseInPlace_C","PCASMSetUseInPlace_ASM",
                    PCASMSetUseInPlace_ASM);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCASMCreateSubdomains2D"
/*@
   PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
   preconditioner for a two-dimensional problem on a regular grid.

   Not Collective

   Input Parameters:
+  m, n - the number of mesh points in the x and y directions
.  M, N - the number of subdomains in the x and y directions
.  dof - degrees of freedom per node
-  overlap - overlap in mesh lines

   Output Parameters:
+  Nsub - the number of subdomains created
-  is - the array of index sets defining the subdomains

   Note:
   Presently PCASMCreateSubdomains2D() is valid only for sequential
   preconditioners.  More general related routines are
   PCASMSetTotalSubdomains() and PCASMSetLocalSubdomains().

   Level: advanced

.keywords: PC, ASM, additive Schwarz, create, subdomains, 2D, regular grid

.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
          PCASMSetOverlap()
@*/
int PCASMCreateSubdomains2D(int m,int n,int M,int N,int dof,int overlap,int *Nsub,IS **is)
{
  int i,j,height,width,ystart,xstart,yleft,yright,xleft,xright,loc_outer;
  int nidx,*idx,loc,ii,jj,ierr,count;

  PetscFunctionBegin;
  if (dof != 1) SETERRQ(PETSC_ERR_SUP,"Presently only 1 degree of freedom per node is supported");

  *Nsub = N*M;
  ierr = PetscMalloc((*Nsub)*sizeof(IS),is);CHKERRQ(ierr);
  ystart = 0;
  loc_outer = 0;
  for (i=0; i<N; i++) {
    height = n/N + ((n % N) > i); /* height of subdomain */
    if (height < 2) SETERRQ(1,"Too many N subdomains for mesh dimension n");
    yleft  = ystart - overlap; if (yleft < 0) yleft = 0;
    yright = ystart + height + overlap; if (yright > n) yright = n;
    xstart = 0;
    for (j=0; j<M; j++) {
      width = m/M + ((m % M) > j); /* width of subdomain */
      if (width < 2) SETERRQ(1,"Too many M subdomains for mesh dimension m");
      xleft  = xstart - overlap; if (xleft < 0) xleft = 0;
      xright = xstart + width + overlap; if (xright > m) xright = m;
      nidx   = (xright - xleft)*(yright - yleft);
      ierr   = PetscMalloc(nidx*sizeof(int),&idx);CHKERRQ(ierr);
      loc    = 0;
      for (ii=yleft; ii<yright; ii++) {
        count = m*ii + xleft;
        for (jj=xleft; jj<xright; jj++) {
          idx[loc++] = count++;
        }
      }
      ierr = ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,(*is)+loc_outer++);CHKERRQ(ierr);
      ierr = PetscFree(idx);CHKERRQ(ierr);
      xstart += width;
    }
    ystart += height;
  }
  for (i=0; i<*Nsub; i++) { ierr = ISSort((*is)[i]);CHKERRQ(ierr); }
  PetscFunctionReturn(0);
}
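
/*
   A usage sketch (hypothetical, uniprocessor): decompose a 64 x 64 grid with
   one unknown per node into 4 x 4 subdomains with one line of overlap, then
   hand the index sets to the preconditioner:

       int Nsub;
       IS  *is;

       ierr = PCASMCreateSubdomains2D(64,64,4,4,1,1,&Nsub,&is);CHKERRQ(ierr);
       ierr = PCASMSetLocalSubdomains(pc,Nsub,is);CHKERRQ(ierr);
*/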

#undef __FUNCT__
#define __FUNCT__ "PCASMGetLocalSubdomains"
/*@C
    PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
    only) for the additive Schwarz preconditioner.

    Collective on PC

    Input Parameter:
.   pc - the preconditioner context

    Output Parameters:
+   n - the number of subdomains for this processor (default value = 1)
-   is - the index sets that define the subdomains for this processor

    Notes:
    The IS numbering is in the parallel, global numbering of the vector.

    Level: advanced

.keywords: PC, ASM, set, local, subdomains, additive Schwarz

.seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubmatrices()
@*/
int PCASMGetLocalSubdomains(PC pc,int *n,IS *is[])
{
  PC_ASM *osm;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE);
  if (!pc->setupcalled) {
    SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUp() or PCSetUp().");
  }

  osm = (PC_ASM*)pc->data;
  if (n)  *n = osm->n_local_true;
  if (is) *is = osm->is;
  PetscFunctionReturn(0);
}
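
/*
   A usage sketch (hypothetical; call after PCSetUp() as required above):
   inspect the subdomains actually in use, e.g. to verify an automatically
   generated decomposition:

       int n,i;
       IS  *is;

       ierr = PCASMGetLocalSubdomains(pc,&n,&is);CHKERRQ(ierr);
       for (i=0; i<n; i++) {
         ierr = ISView(is[i],PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr);
       }
*/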

#undef __FUNCT__
#define __FUNCT__ "PCASMGetLocalSubmatrices"
/*@C
    PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
    only) for the additive Schwarz preconditioner.

    Collective on PC

    Input Parameter:
.   pc - the preconditioner context

    Output Parameters:
+   n - the number of matrices for this processor (default value = 1)
-   mat - the matrices

    Level: advanced

.keywords: PC, ASM, set, local, subdomains, additive Schwarz, block Jacobi

.seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
@*/
int PCASMGetLocalSubmatrices(PC pc,int *n,Mat *mat[])
{
  PC_ASM *osm;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE);
  if (!pc->setupcalled) {
    SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUp() or PCSetUp().");
  }

  osm = (PC_ASM*)pc->data;
  if (n)   *n   = osm->n_local_true;
  if (mat) *mat = osm->pmat;
  PetscFunctionReturn(0);
}
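
/*
   A usage sketch (hypothetical; call after PCSetUp() as required above):
   examine the extracted subdomain matrices, for instance to check their
   sizes once overlap has been added:

       int m,n,nmat,i;
       Mat *mats;

       ierr = PCASMGetLocalSubmatrices(pc,&nmat,&mats);CHKERRQ(ierr);
       for (i=0; i<nmat; i++) {
         ierr = MatGetSize(mats[i],&m,&n);CHKERRQ(ierr);
       }
*/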