
/*
  This file defines an additive Schwarz preconditioner for any Mat implementation.

  Note that each processor may have any number of subdomains. But, to keep the
  VecScatter() logic simple, we treat each processor as if it had the same
  number of subdomains.

       n - total number of true subdomains on all processors
       n_local_true - actual number of subdomains on this processor
       n_local = maximum over all processors of n_local_true

  For example, with 3 subdomains split 2/1 across two processors, n = 3,
  n_local_true is 2 on the first processor and 1 on the second, and n_local = 2.
*/
#include <petsc/private/pcimpl.h>     /*I "petscpc.h" I*/
#include <petscdm.h>

typedef struct {
  PetscInt   n, n_local, n_local_true;
  PetscInt   overlap;             /* overlap requested by user */
  KSP        *ksp;                /* linear solvers for each block */
  VecScatter *restriction;        /* mapping from global to subregion */
  VecScatter *localization;       /* mapping from overlapping to non-overlapping subregion */
  VecScatter *prolongation;       /* mapping from subregion to global */
  Vec        *x,*y,*y_local;      /* work vectors */
  IS         *is;                 /* index set that defines each overlapping subdomain */
  IS         *is_local;           /* index set that defines each non-overlapping subdomain, may be NULL */
  Mat        *mat,*pmat;          /* mat is not currently used */
  PCASMType  type;                /* use reduced interpolation, restriction or both */
  PetscBool  type_set;            /* if user set this value (so won't change it for symmetric problems) */
  PetscBool  same_local_solves;   /* flag indicating whether all local solvers are same */
  PetscBool  sort_indices;        /* flag to sort subdomain indices */
  PetscBool  dm_subdomains;       /* whether DM is allowed to define subdomains */
  PCCompositeType loctype;        /* the type of composition for local solves */
  /* For multiplicative solve */
  Mat       *lmats;               /* submatrices for overlapping multiplicative (process) subdomain */
  Vec        lx, ly;              /* work vectors */
  IS         lis;                 /* index set that defines each overlapping multiplicative (process) subdomain */
} PC_ASM;

#undef __FUNCT__
#define __FUNCT__ "PCView_ASM"
static PetscErrorCode PCView_ASM(PC pc,PetscViewer viewer)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  PetscInt       i,bsz;
  PetscBool      iascii,isstring;
  PetscViewer    sviewer;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    char overlaps[256] = "user-defined overlap",blocks[256] = "total subdomain blocks not yet set";
    if (osm->overlap >= 0) {ierr = PetscSNPrintf(overlaps,sizeof(overlaps),"amount of overlap = %D",osm->overlap);CHKERRQ(ierr);}
    if (osm->n > 0) {ierr = PetscSNPrintf(blocks,sizeof(blocks),"total subdomain blocks = %D",osm->n);CHKERRQ(ierr);}
    ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: %s, %s\n",blocks,overlaps);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: restriction/interpolation type - %s\n",PCASMTypes[osm->type]);CHKERRQ(ierr);
    if (osm->dm_subdomains) {ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: using DM to define subdomains\n");CHKERRQ(ierr);}
    if (osm->loctype != PC_COMPOSITE_ADDITIVE) {ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: local solve composition type - %s\n",PCCompositeTypes[osm->loctype]);CHKERRQ(ierr);}
    ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
    if (osm->same_local_solves) {
      if (osm->ksp) {
        ierr = PetscViewerASCIIPrintf(viewer,"  Local solve is same for all blocks, in the following KSP and PC objects:\n");CHKERRQ(ierr);
        ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
        if (!rank) {
          ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
          ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);
          ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
        }
        ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      }
    } else {
      ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"  [%d] number of local blocks = %D\n",(int)rank,osm->n_local_true);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"  Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
      ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      for (i=0; i<osm->n_local_true; i++) {
        ierr = ISGetLocalSize(osm->is[i],&bsz);CHKERRQ(ierr);
        ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"[%d] local block number %D, size = %D\n",(int)rank,i,bsz);CHKERRQ(ierr);
        ierr = KSPView(osm->ksp[i],sviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
      }
      ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    ierr = PetscViewerStringSPrintf(viewer," blocks=%D, overlap=%D, type=%s",osm->n,osm->overlap,PCASMTypes[osm->type]);CHKERRQ(ierr);
    ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
    if (osm->ksp) {ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);}
    ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMPrintSubdomains"
static PetscErrorCode PCASMPrintSubdomains(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  const char     *prefix;
  char           fname[PETSC_MAX_PATH_LEN+1];
  PetscViewer    viewer, sviewer;
  char           *s;
  PetscInt       i,j,nidx;
  const PetscInt *idx;
  PetscMPIInt    rank, size;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank);CHKERRQ(ierr);
  ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
  ierr = PetscOptionsGetString(NULL,prefix,"-pc_asm_print_subdomains",fname,PETSC_MAX_PATH_LEN,NULL);CHKERRQ(ierr);
  if (fname[0] == 0) { ierr = PetscStrcpy(fname,"stdout");CHKERRQ(ierr); };
  ierr = PetscViewerASCIIOpen(PetscObjectComm((PetscObject)pc),fname,&viewer);CHKERRQ(ierr);
  for (i=0; i<osm->n_local; i++) {
    if (i < osm->n_local_true) {
      ierr = ISGetLocalSize(osm->is[i],&nidx);CHKERRQ(ierr);
      ierr = ISGetIndices(osm->is[i],&idx);CHKERRQ(ierr);
      /* Print to a string viewer; allow up to 16 characters per index (15 digits plus a separator) plus 512 characters for the header. */
      ierr = PetscMalloc1(16*(nidx+1)+512, &s);CHKERRQ(ierr);
      ierr = PetscViewerStringOpen(PETSC_COMM_SELF, s, 16*(nidx+1)+512, &sviewer);CHKERRQ(ierr);
      ierr = PetscViewerStringSPrintf(sviewer, "[%d:%d] Subdomain %D with overlap:\n", (int)rank, (int)size, i);CHKERRQ(ierr);
      for (j=0; j<nidx; j++) {
        ierr = PetscViewerStringSPrintf(sviewer,"%D ",idx[j]);CHKERRQ(ierr);
      }
      ierr = ISRestoreIndices(osm->is[i],&idx);CHKERRQ(ierr);
      ierr = PetscViewerStringSPrintf(sviewer,"\n");CHKERRQ(ierr);
      ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer, "%s", s);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
      ierr = PetscFree(s);CHKERRQ(ierr);
      if (osm->is_local) {
        /* Print to a string viewer; allow up to 16 characters per index (15 digits plus a separator) plus 512 characters for the header. */
        ierr = PetscMalloc1(16*(nidx+1)+512, &s);CHKERRQ(ierr);
        ierr = PetscViewerStringOpen(PETSC_COMM_SELF, s, 16*(nidx+1)+512, &sviewer);CHKERRQ(ierr);
        ierr = PetscViewerStringSPrintf(sviewer, "[%d:%d] Subdomain %D without overlap:\n", (int)rank, (int)size, i);CHKERRQ(ierr);
        ierr = ISGetLocalSize(osm->is_local[i],&nidx);CHKERRQ(ierr);
        ierr = ISGetIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
        for (j=0; j<nidx; j++) {
          ierr = PetscViewerStringSPrintf(sviewer,"%D ",idx[j]);CHKERRQ(ierr);
        }
        ierr = ISRestoreIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
        ierr = PetscViewerStringSPrintf(sviewer,"\n");CHKERRQ(ierr);
        ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIISynchronizedPrintf(viewer, "%s", s);CHKERRQ(ierr);
        ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
        ierr = PetscFree(s);CHKERRQ(ierr);
      }
    } else {
      /* Participate in collective viewer calls. */
      ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
      /* Assume either all ranks have is_local or none do. */
      if (osm->is_local) {
        ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
        ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
      }
    }
  }
  ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
  ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetUp_ASM"
static PetscErrorCode PCSetUp_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscBool      symset,flg;
  PetscInt       i,m,m_local;
  MatReuse       scall = MAT_REUSE_MATRIX;
  IS             isl;
  KSP            ksp;
  PC             subpc;
  const char     *prefix,*pprefix;
  Vec            vec;
  DM             *domain_dm = NULL;

  PetscFunctionBegin;
  if (!pc->setupcalled) {

    if (!osm->type_set) {
      ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
      if (symset && flg) osm->type = PC_ASM_BASIC;
    }

    /* Note: if subdomains have been set either via PCASMSetTotalSubdomains() or via PCASMSetLocalSubdomains(), osm->n_local_true will not be PETSC_DECIDE */
    if (osm->n_local_true == PETSC_DECIDE) {
      /* no subdomains given */
      /* try pc->dm first, if allowed */
      if (osm->dm_subdomains && pc->dm) {
        PetscInt  num_domains, d;
        char      **domain_names;
        IS        *inner_domain_is, *outer_domain_is;
        ierr = DMCreateDomainDecomposition(pc->dm, &num_domains, &domain_names, &inner_domain_is, &outer_domain_is, &domain_dm);CHKERRQ(ierr);
        osm->overlap = -1; /* We do not want to increase the overlap of the IS.
                              A future improvement of this code might allow one to use
                              DM-defined subdomains and also increase the overlap,
                              but that is not currently supported */
        if (num_domains) {
          ierr = PCASMSetLocalSubdomains(pc, num_domains, outer_domain_is, inner_domain_is);CHKERRQ(ierr);
        }
        for (d = 0; d < num_domains; ++d) {
          if (domain_names)    {ierr = PetscFree(domain_names[d]);CHKERRQ(ierr);}
          if (inner_domain_is) {ierr = ISDestroy(&inner_domain_is[d]);CHKERRQ(ierr);}
          if (outer_domain_is) {ierr = ISDestroy(&outer_domain_is[d]);CHKERRQ(ierr);}
        }
        ierr = PetscFree(domain_names);CHKERRQ(ierr);
        ierr = PetscFree(inner_domain_is);CHKERRQ(ierr);
        ierr = PetscFree(outer_domain_is);CHKERRQ(ierr);
      }
      if (osm->n_local_true == PETSC_DECIDE) {
        /* still no subdomains; use one subdomain per processor */
        osm->n_local_true = 1;
      }
    }
    { /* determine the global and max number of subdomains */
      struct {PetscInt max,sum;} inwork,outwork;
      inwork.max   = osm->n_local_true;
      inwork.sum   = osm->n_local_true;
      ierr         = MPIU_Allreduce(&inwork,&outwork,1,MPIU_2INT,MPIU_MAXSUM_OP,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
      osm->n_local = outwork.max;
      osm->n       = outwork.sum;
    }
    if (!osm->is) { /* create the index sets */
      ierr = PCASMCreateSubdomains(pc->pmat,osm->n_local_true,&osm->is);CHKERRQ(ierr);
    }
    if (osm->n_local_true > 1 && !osm->is_local) {
      ierr = PetscMalloc1(osm->n_local_true,&osm->is_local);CHKERRQ(ierr);
      for (i=0; i<osm->n_local_true; i++) {
        if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
          ierr = ISDuplicate(osm->is[i],&osm->is_local[i]);CHKERRQ(ierr);
          ierr = ISCopy(osm->is[i],osm->is_local[i]);CHKERRQ(ierr);
        } else {
          ierr             = PetscObjectReference((PetscObject)osm->is[i]);CHKERRQ(ierr);
          osm->is_local[i] = osm->is[i];
        }
      }
    }
    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    flg  = PETSC_FALSE;
    ierr = PetscOptionsGetBool(NULL,prefix,"-pc_asm_print_subdomains",&flg,NULL);CHKERRQ(ierr);
    if (flg) { ierr = PCASMPrintSubdomains(pc);CHKERRQ(ierr); }

    if (osm->overlap > 0) {
      /* Extend the "overlapping" regions by a number of steps */
      ierr = MatIncreaseOverlap(pc->pmat,osm->n_local_true,osm->is,osm->overlap);CHKERRQ(ierr);
    }
    if (osm->sort_indices) {
      for (i=0; i<osm->n_local_true; i++) {
        ierr = ISSort(osm->is[i]);CHKERRQ(ierr);
        if (osm->is_local) {
          ierr = ISSort(osm->is_local[i]);CHKERRQ(ierr);
        }
      }
    }
    /* Create the local work vectors and scatter contexts */
    ierr = MatCreateVecs(pc->pmat,&vec,0);CHKERRQ(ierr);
    ierr = PetscMalloc1(osm->n_local,&osm->restriction);CHKERRQ(ierr);
    if (osm->is_local) {ierr = PetscMalloc1(osm->n_local,&osm->localization);CHKERRQ(ierr);}
    ierr = PetscMalloc1(osm->n_local,&osm->prolongation);CHKERRQ(ierr);
    ierr = PetscMalloc1(osm->n_local,&osm->x);CHKERRQ(ierr);
    ierr = PetscMalloc1(osm->n_local,&osm->y);CHKERRQ(ierr);
    ierr = PetscMalloc1(osm->n_local,&osm->y_local);CHKERRQ(ierr);
    for (i=0; i<osm->n_local_true; ++i) {
      ierr = ISGetLocalSize(osm->is[i],&m);CHKERRQ(ierr);
      ierr = VecCreateSeq(PETSC_COMM_SELF,m,&osm->x[i]);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,osm->is[i],osm->x[i],isl,&osm->restriction[i]);CHKERRQ(ierr);
      ierr = ISDestroy(&isl);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
      if (osm->is_local) {
        ISLocalToGlobalMapping ltog;
        IS                     isll;
        const PetscInt         *idx_local;
        PetscInt               *idx,nout;

        ierr = ISLocalToGlobalMappingCreateIS(osm->is[i],&ltog);CHKERRQ(ierr);
        ierr = ISGetLocalSize(osm->is_local[i],&m_local);CHKERRQ(ierr);
        ierr = ISGetIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr);
        ierr = PetscMalloc1(m_local,&idx);CHKERRQ(ierr);
        ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m_local,idx_local,&nout,idx);CHKERRQ(ierr);
        ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
        if (nout != m_local) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"is_local not a subset of is");
        ierr = ISRestoreIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr);
        ierr = ISCreateGeneral(PETSC_COMM_SELF,m_local,idx,PETSC_OWN_POINTER,&isll);CHKERRQ(ierr);
        ierr = ISCreateStride(PETSC_COMM_SELF,m_local,0,1,&isl);CHKERRQ(ierr);
        ierr = VecCreateSeq(PETSC_COMM_SELF,m_local,&osm->y_local[i]);CHKERRQ(ierr);
        ierr = VecScatterCreate(osm->y[i],isll,osm->y_local[i],isl,&osm->localization[i]);CHKERRQ(ierr);
        ierr = ISDestroy(&isll);CHKERRQ(ierr);

        ierr = VecScatterCreate(vec,osm->is_local[i],osm->y_local[i],isl,&osm->prolongation[i]);CHKERRQ(ierr);
        ierr = ISDestroy(&isl);CHKERRQ(ierr);
      } else {
        ierr = VecGetLocalSize(vec,&m_local);CHKERRQ(ierr);

        osm->y_local[i] = osm->y[i];

        ierr = PetscObjectReference((PetscObject) osm->y[i]);CHKERRQ(ierr);

        osm->prolongation[i] = osm->restriction[i];

        ierr = PetscObjectReference((PetscObject) osm->restriction[i]);CHKERRQ(ierr);
      }
    }
    for (i=osm->n_local_true; i<osm->n_local; i++) {
      ierr = VecCreateSeq(PETSC_COMM_SELF,0,&osm->x[i]);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y_local[i]);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&isl);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,isl,osm->x[i],isl,&osm->restriction[i]);CHKERRQ(ierr);
      if (osm->is_local) {
        ierr = VecScatterCreate(osm->y[i],isl,osm->y_local[i],isl,&osm->localization[i]);CHKERRQ(ierr);
        ierr = VecScatterCreate(vec,isl,osm->x[i],isl,&osm->prolongation[i]);CHKERRQ(ierr);
      } else {
        osm->prolongation[i] = osm->restriction[i];
        ierr                 = PetscObjectReference((PetscObject) osm->restriction[i]);CHKERRQ(ierr);
      }
      ierr = ISDestroy(&isl);CHKERRQ(ierr);
    }
    ierr = VecDestroy(&vec);CHKERRQ(ierr);

    if (!osm->ksp) {
      /* Create the local solvers */
      ierr = PetscMalloc1(osm->n_local_true,&osm->ksp);CHKERRQ(ierr);
      if (domain_dm) {
        ierr = PetscInfo(pc,"Setting up ASM subproblems using the embedded DM\n");CHKERRQ(ierr);
      }
      for (i=0; i<osm->n_local_true; i++) {
        ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
        ierr = KSPSetErrorIfNotConverged(ksp,pc->erroriffailure);CHKERRQ(ierr);
        ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)ksp);CHKERRQ(ierr);
        ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
        ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
        ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
        ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
        ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
        ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
        if (domain_dm) {
          ierr = KSPSetDM(ksp, domain_dm[i]);CHKERRQ(ierr);
          ierr = KSPSetDMActive(ksp, PETSC_FALSE);CHKERRQ(ierr);
          ierr = DMDestroy(&domain_dm[i]);CHKERRQ(ierr);
        }
        osm->ksp[i] = ksp;
      }
      if (domain_dm) {
        ierr = PetscFree(domain_dm);CHKERRQ(ierr);
      }
    }
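    /* For multiplicative composition, build the union of all local subdomains (lis)
       and matching work vectors (lx, ly) so corrections can be propagated between blocks */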
    if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
      PetscInt m;

      ierr = ISConcatenate(PETSC_COMM_SELF, osm->n_local_true, osm->is, &osm->lis);CHKERRQ(ierr);
      ierr = ISSortRemoveDups(osm->lis);CHKERRQ(ierr);
      ierr = ISGetLocalSize(osm->lis, &m);CHKERRQ(ierr);
      ierr = VecCreateSeq(PETSC_COMM_SELF, m, &osm->lx);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->lx, &osm->ly);CHKERRQ(ierr);
    }
    scall = MAT_INITIAL_MATRIX;
  } else {
    /*
       Destroy the blocks from the previous iteration
    */
    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
      scall = MAT_INITIAL_MATRIX;
    }
  }

  /*
     Extract out the submatrices
  */
  ierr = MatGetSubMatrices(pc->pmat,osm->n_local_true,osm->is,osm->is,scall,&osm->pmat);CHKERRQ(ierr);
  if (scall == MAT_INITIAL_MATRIX) {
    ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
    for (i=0; i<osm->n_local_true; i++) {
      ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)osm->pmat[i]);CHKERRQ(ierr);
      ierr = PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);CHKERRQ(ierr);
    }
  }
  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
    IS      *cis;
    PetscInt c;

    ierr = PetscMalloc1(osm->n_local_true, &cis);CHKERRQ(ierr);
    for (c = 0; c < osm->n_local_true; ++c) cis[c] = osm->lis;
    ierr = MatGetSubMatrices(pc->pmat, osm->n_local_true, osm->is, cis, scall, &osm->lmats);CHKERRQ(ierr);
    ierr = PetscFree(cis);CHKERRQ(ierr);
  }

  /* Return control to the user so that the submatrices can be modified (e.g., to apply
     different boundary conditions for the submatrices than for the global problem) */
  ierr = PCModifySubMatrices(pc,osm->n_local_true,osm->is,osm->is,osm->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);

  /*
     Loop over subdomains putting them into local ksp
  */
  for (i=0; i<osm->n_local_true; i++) {
    ierr = KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i]);CHKERRQ(ierr);
    if (!pc->setupcalled) {
      ierr = KSPSetFromOptions(osm->ksp[i]);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetUpOnBlocks_ASM"
static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc)
{
  PC_ASM             *osm = (PC_ASM*)pc->data;
  PetscErrorCode     ierr;
  PetscInt           i;
  KSPConvergedReason reason;

  PetscFunctionBegin;
  for (i=0; i<osm->n_local_true; i++) {
    ierr = KSPSetUp(osm->ksp[i]);CHKERRQ(ierr);
    ierr = KSPGetConvergedReason(osm->ksp[i],&reason);CHKERRQ(ierr);
    if (reason == KSP_DIVERGED_PCSETUP_FAILED) {
      pc->failedreason = PC_SUBPC_ERROR;
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApply_ASM"
static PetscErrorCode PCApply_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local = osm->n_local,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).
  */
  if (!(osm->type & PC_ASM_RESTRICT)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local_true; i++) {
      ierr = VecZeroEntries(osm->x[i]);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_INTERPOLATE)) reverse = SCATTER_REVERSE_LOCAL;

  switch (osm->loctype) {
  case PC_COMPOSITE_ADDITIVE:
    for (i=0; i<n_local; i++) {
      ierr = VecScatterBegin(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    }
    ierr = VecZeroEntries(y);CHKERRQ(ierr);
    /* do the local solves */
    for (i=0; i<n_local_true; i++) {
      ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
      ierr = KSPSolve(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
      if (osm->localization) {
        ierr = VecScatterBegin(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
      }
      ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
    }
    /* handle the rest of the scatters that do not have local solves */
    for (i=n_local_true; i<n_local; i++) {
      ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
      ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
    }
    for (i=0; i<n_local; i++) {
      ierr = VecScatterEnd(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
    }
    break;
  case PC_COMPOSITE_MULTIPLICATIVE:
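    /* Solve the blocks sequentially: the correction computed on block i is pushed through
       the overlapping submatrix lmats[i+1] and subtracted from the right-hand side seen
       by block i+1, so each block works with the updated residual */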
    ierr = VecZeroEntries(y);CHKERRQ(ierr);
    /* do the local solves */
    for (i = 0; i < n_local_true; ++i) {
      if (i > 0) {
        /* Update rhs */
        ierr = VecScatterBegin(osm->restriction[i], osm->lx, osm->x[i], INSERT_VALUES, forward);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->restriction[i], osm->lx, osm->x[i], INSERT_VALUES, forward);CHKERRQ(ierr);
      } else {
        ierr = VecZeroEntries(osm->x[i]);CHKERRQ(ierr);
      }
      ierr = VecScatterBegin(osm->restriction[i], x, osm->x[i], ADD_VALUES, forward);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->restriction[i], x, osm->x[i], ADD_VALUES, forward);CHKERRQ(ierr);
      ierr = KSPSolve(osm->ksp[i], osm->x[i], osm->y[i]);CHKERRQ(ierr);
      if (osm->localization) {
        ierr = VecScatterBegin(osm->localization[i], osm->y[i], osm->y_local[i], INSERT_VALUES, forward);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->localization[i], osm->y[i], osm->y_local[i], INSERT_VALUES, forward);CHKERRQ(ierr);
      }
      ierr = VecScatterBegin(osm->prolongation[i], osm->y_local[i], y, ADD_VALUES, reverse);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->prolongation[i], osm->y_local[i], y, ADD_VALUES, reverse);CHKERRQ(ierr);
      if (i < n_local_true-1) {
        ierr = VecSet(osm->ly, 0.0);CHKERRQ(ierr);
        ierr = VecScatterBegin(osm->prolongation[i], osm->y_local[i], osm->ly, INSERT_VALUES, reverse);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->prolongation[i], osm->y_local[i], osm->ly, INSERT_VALUES, reverse);CHKERRQ(ierr);
        ierr = VecScale(osm->ly, -1.0);CHKERRQ(ierr);
        ierr = MatMult(osm->lmats[i+1], osm->ly, osm->y[i+1]);CHKERRQ(ierr);
        ierr = VecScatterBegin(osm->restriction[i+1], osm->y[i+1], osm->lx, INSERT_VALUES, reverse);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->restriction[i+1], osm->y[i+1], osm->lx, INSERT_VALUES, reverse);CHKERRQ(ierr);
      }
    }
    /* handle the rest of the scatters that do not have local solves */
    for (i = n_local_true; i < n_local; ++i) {
      ierr = VecScatterBegin(osm->restriction[i], x, osm->x[i], INSERT_VALUES, forward);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->restriction[i], x, osm->x[i], INSERT_VALUES, forward);CHKERRQ(ierr);
      ierr = VecScatterBegin(osm->prolongation[i], osm->y_local[i], y, ADD_VALUES, reverse);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->prolongation[i], osm->y_local[i], y, ADD_VALUES, reverse);CHKERRQ(ierr);
    }
    break;
  default: SETERRQ1(PetscObjectComm((PetscObject) pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApplyTranspose_ASM"
static PetscErrorCode PCApplyTranspose_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local = osm->n_local,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).

     Note: these are reversed from PCApply_ASM() because we are applying the
     transpose of the three terms
  */
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local_true; i++) {
      ierr = VecZeroEntries(osm->x[i]);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_RESTRICT)) reverse = SCATTER_REVERSE_LOCAL;

  for (i=0; i<n_local; i++) {
    ierr = VecScatterBegin(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
  }
  ierr = VecZeroEntries(y);CHKERRQ(ierr);
  /* do the local solves */
  for (i=0; i<n_local_true; i++) {
    ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = KSPSolveTranspose(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
    if (osm->localization) {
      ierr = VecScatterBegin(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    }
    ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  /* handle the rest of the scatters that do not have local solves */
  for (i=n_local_true; i<n_local; i++) {
    ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  for (i=0; i<n_local; i++) {
    ierr = VecScatterEnd(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCReset_ASM"
static PetscErrorCode PCReset_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  if (osm->ksp) {
    for (i=0; i<osm->n_local_true; i++) {
      ierr = KSPReset(osm->ksp[i]);CHKERRQ(ierr);
    }
  }
  if (osm->pmat) {
    if (osm->n_local_true > 0) {
      ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
    }
  }
  if (osm->restriction) {
    for (i=0; i<osm->n_local; i++) {
      ierr = VecScatterDestroy(&osm->restriction[i]);CHKERRQ(ierr);
      if (osm->localization) {ierr = VecScatterDestroy(&osm->localization[i]);CHKERRQ(ierr);}
      ierr = VecScatterDestroy(&osm->prolongation[i]);CHKERRQ(ierr);
      ierr = VecDestroy(&osm->x[i]);CHKERRQ(ierr);
      ierr = VecDestroy(&osm->y[i]);CHKERRQ(ierr);
      ierr = VecDestroy(&osm->y_local[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree(osm->restriction);CHKERRQ(ierr);
    if (osm->localization) {ierr = PetscFree(osm->localization);CHKERRQ(ierr);}
    ierr = PetscFree(osm->prolongation);CHKERRQ(ierr);
    ierr = PetscFree(osm->x);CHKERRQ(ierr);
    ierr = PetscFree(osm->y);CHKERRQ(ierr);
    ierr = PetscFree(osm->y_local);CHKERRQ(ierr);
  }
  ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
    ierr = ISDestroy(&osm->lis);CHKERRQ(ierr);
    ierr = MatDestroyMatrices(osm->n_local_true, &osm->lmats);CHKERRQ(ierr);
    ierr = VecDestroy(&osm->lx);CHKERRQ(ierr);
    ierr = VecDestroy(&osm->ly);CHKERRQ(ierr);
  }

  osm->is       = 0;
  osm->is_local = 0;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCDestroy_ASM"
static PetscErrorCode PCDestroy_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  ierr = PCReset_ASM(pc);CHKERRQ(ierr);
  if (osm->ksp) {
    for (i=0; i<osm->n_local_true; i++) {
      ierr = KSPDestroy(&osm->ksp[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree(osm->ksp);CHKERRQ(ierr);
  }
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetFromOptions_ASM"
static PetscErrorCode PCSetFromOptions_ASM(PetscOptionItems *PetscOptionsObject,PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       blocks,ovl;
  PetscBool      symset,flg;
  PCASMType      asmtype;
  PCCompositeType loctype;

  PetscFunctionBegin;
  /* use the basic (symmetric) variant if the matrix is known to be symmetric */
  if (!osm->type_set && pc->pmat) {
    ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
    if (symset && flg) osm->type = PC_ASM_BASIC;
  }
  ierr = PetscOptionsHead(PetscOptionsObject,"Additive Schwarz options");CHKERRQ(ierr);
  ierr = PetscOptionsBool("-pc_asm_dm_subdomains","Use DMCreateDomainDecomposition() to define subdomains","PCASMSetDMSubdomains",osm->dm_subdomains,&osm->dm_subdomains,&flg);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-pc_asm_blocks","Number of subdomains","PCASMSetTotalSubdomains",osm->n,&blocks,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PCASMSetTotalSubdomains(pc,blocks,NULL,NULL);CHKERRQ(ierr);
    osm->dm_subdomains = PETSC_FALSE;
  }
  ierr = PetscOptionsInt("-pc_asm_overlap","Number of grid points overlap","PCASMSetOverlap",osm->overlap,&ovl,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PCASMSetOverlap(pc,ovl);CHKERRQ(ierr);
    osm->dm_subdomains = PETSC_FALSE;
  }
  flg  = PETSC_FALSE;
  ierr = PetscOptionsEnum("-pc_asm_type","Type of restriction/extension","PCASMSetType",PCASMTypes,(PetscEnum)osm->type,(PetscEnum*)&asmtype,&flg);CHKERRQ(ierr);
  if (flg) {ierr = PCASMSetType(pc,asmtype);CHKERRQ(ierr); }
  flg  = PETSC_FALSE;
  ierr = PetscOptionsEnum("-pc_asm_local_type","Type of local solver composition","PCASMSetLocalType",PCCompositeTypes,(PetscEnum)osm->loctype,(PetscEnum*)&loctype,&flg);CHKERRQ(ierr);
  if (flg) {ierr = PCASMSetLocalType(pc,loctype);CHKERRQ(ierr); }
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*------------------------------------------------------------------------------------*/

#undef __FUNCT__
#define __FUNCT__ "PCASMSetLocalSubdomains_ASM"
static PetscErrorCode  PCASMSetLocalSubdomains_ASM(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 1 or more blocks, n = %D",n);
  if (pc->setupcalled && (n != osm->n_local_true || is)) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetLocalSubdomains() should be called before calling PCSetUp().");

  if (!pc->setupcalled) {
    if (is) {
      for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is[i]);CHKERRQ(ierr);}
    }
    if (is_local) {
      for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is_local[i]);CHKERRQ(ierr);}
    }
    ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);

    osm->n_local_true = n;
    osm->is           = 0;
    osm->is_local     = 0;
    if (is) {
      ierr = PetscMalloc1(n,&osm->is);CHKERRQ(ierr);
      for (i=0; i<n; i++) osm->is[i] = is[i];
      /* Flag indicating that the user has set overlapping subdomains so PCASM should not increase their size. */
      osm->overlap = -1;
    }
    if (is_local) {
      ierr = PetscMalloc1(n,&osm->is_local);CHKERRQ(ierr);
      for (i=0; i<n; i++) osm->is_local[i] = is_local[i];
      if (!is) {
        ierr = PetscMalloc1(osm->n_local_true,&osm->is);CHKERRQ(ierr);
        for (i=0; i<osm->n_local_true; i++) {
          if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
            ierr = ISDuplicate(osm->is_local[i],&osm->is[i]);CHKERRQ(ierr);
            ierr = ISCopy(osm->is_local[i],osm->is[i]);CHKERRQ(ierr);
          } else {
            ierr = PetscObjectReference((PetscObject)osm->is_local[i]);CHKERRQ(ierr);
            osm->is[i] = osm->is_local[i];
          }
        }
      }
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMSetTotalSubdomains_ASM"
static PetscErrorCode  PCASMSetTotalSubdomains_ASM(PC pc,PetscInt N,IS *is,IS *is_local)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscMPIInt    rank,size;
  PetscInt       n;

  PetscFunctionBegin;
  if (N < 1) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Number of total blocks must be > 0, N = %D",N);
  if (is || is_local) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Use PCASMSetLocalSubdomains() to set specific index sets\n\tThey cannot be set globally yet.");

  /*
     Split the subdomains equally among all processors
  */
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);
  n    = N/size + ((N % size) > rank);
  if (!n) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Process %d must have at least one block: total processors %d total blocks %D",(int)rank,(int)size,N);
  if (pc->setupcalled && n != osm->n_local_true) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"PCASMSetTotalSubdomains() should be called before PCSetUp().");
  if (!pc->setupcalled) {
    ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);

    osm->n_local_true = n;
    osm->is           = 0;
    osm->is_local     = 0;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMSetOverlap_ASM"
static PetscErrorCode  PCASMSetOverlap_ASM(PC pc,PetscInt ovl)
{
  PC_ASM *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  if (ovl < 0) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
  if (pc->setupcalled && ovl != osm->overlap) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetOverlap() should be called before PCSetUp().");
  if (!pc->setupcalled) osm->overlap = ovl;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMSetType_ASM"
static PetscErrorCode  PCASMSetType_ASM(PC pc,PCASMType type)
{
  PC_ASM *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  osm->type     = type;
  osm->type_set = PETSC_TRUE;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMGetType_ASM"
static PetscErrorCode  PCASMGetType_ASM(PC pc,PCASMType *type)
{
  PC_ASM *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  *type = osm->type;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMSetLocalType_ASM"
static PetscErrorCode  PCASMSetLocalType_ASM(PC pc, PCCompositeType type)
{
  PC_ASM *osm = (PC_ASM *) pc->data;

  PetscFunctionBegin;
  osm->loctype = type;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMGetLocalType_ASM"
static PetscErrorCode  PCASMGetLocalType_ASM(PC pc, PCCompositeType *type)
{
  PC_ASM *osm = (PC_ASM *) pc->data;

  PetscFunctionBegin;
  *type = osm->loctype;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMSetSortIndices_ASM"
static PetscErrorCode  PCASMSetSortIndices_ASM(PC pc,PetscBool doSort)
{
  PC_ASM *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  osm->sort_indices = doSort;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMGetSubKSP_ASM"
static PetscErrorCode  PCASMGetSubKSP_ASM(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (osm->n_local_true < 1) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ORDER,"Need to call PCSetUp() on PC (or KSPSetUp() on the outer KSP object) before calling here");

  if (n_local) *n_local = osm->n_local_true;
  if (first_local) {
    ierr          = MPI_Scan(&osm->n_local_true,first_local,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
    *first_local -= osm->n_local_true;
  }
  if (ksp) {
    /* Assume that local solves are now different; not necessarily
       true though!  This flag is used only for PCView_ASM() */
    *ksp                   = osm->ksp;
    osm->same_local_solves = PETSC_FALSE;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMSetLocalSubdomains"
/*@C
    PCASMSetLocalSubdomains - Sets the local subdomains (for this processor only) for the additive Schwarz preconditioner.

    Collective on PC

    Input Parameters:
+   pc - the preconditioner context
.   n - the number of subdomains for this processor (default value = 1)
.   is - the index sets that define the subdomains for this processor
         (or NULL for PETSc to determine subdomains)
-   is_local - the index sets that define the local part of the subdomains for this processor
         (or NULL to use the default of 1 subdomain per process)

    Notes:
    The IS numbering is in the parallel, global numbering of the vector for both is and is_local

    By default the ASM preconditioner uses 1 block per processor.

    Use PCASMSetTotalSubdomains() to set the subdomains for all processors.
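
    Example (a sketch, assuming this process owns rows rstart up to rstart+m1+m2
    and splits them into two blocks; the sizes m1 and m2 are hypothetical):
.vb
      IS is[2];
      ISCreateStride(PETSC_COMM_SELF,m1,rstart,1,&is[0]);
      ISCreateStride(PETSC_COMM_SELF,m2,rstart+m1,1,&is[1]);
      PCASMSetLocalSubdomains(pc,2,is,NULL);
.ve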

    Level: advanced

.keywords: PC, ASM, set, local, subdomains, additive Schwarz

.seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
@*/
PetscErrorCode  PCASMSetLocalSubdomains(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCASMSetLocalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,n,is,is_local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMSetTotalSubdomains"
/*@C
    PCASMSetTotalSubdomains - Sets the subdomains for all processors for the
    additive Schwarz preconditioner.  Either all or no processors in the
    PC communicator must call this routine, with the same index sets.

    Collective on PC

    Input Parameters:
+   pc - the preconditioner context
.   N  - the number of subdomains for all processors
.   is - the index sets that define the subdomains for all processors
         (or NULL to ask PETSc to come up with subdomains)
-   is_local - the index sets that define the local part of the subdomains for this processor
         (or NULL to use the default of 1 subdomain per process)

    Options Database Key:
    To set the total number of subdomain blocks rather than specify the
    index sets, use the option
.    -pc_asm_blocks <blks> - Sets total blocks

    Notes:
    Currently you cannot use this to set the actual subdomains with the argument is.

    By default the ASM preconditioner uses 1 block per processor.

    These index sets cannot be destroyed until after completion of the
    linear solves for which the ASM preconditioner is being used.

    Use PCASMSetLocalSubdomains() to set local subdomains.

    The IS numbering is in the parallel, global numbering of the vector for both is and is_local
942 
943 .keywords: PC, ASM, set, total, global, subdomains, additive Schwarz
944 
945 .seealso: PCASMSetLocalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
946           PCASMCreateSubdomains2D()
947 @*/
948 PetscErrorCode  PCASMSetTotalSubdomains(PC pc,PetscInt N,IS is[],IS is_local[])
949 {
950   PetscErrorCode ierr;
951 
952   PetscFunctionBegin;
953   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
954   ierr = PetscTryMethod(pc,"PCASMSetTotalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,N,is,is_local));CHKERRQ(ierr);
955   PetscFunctionReturn(0);
956 }

#undef __FUNCT__
#define __FUNCT__ "PCASMSetOverlap"
/*@
    PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
    additive Schwarz preconditioner.  Either all or no processors in the
    PC communicator must call this routine.  If MatIncreaseOverlap() is used,
    use the option -mat_increase_overlap when the problem size is large.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)

    Options Database Key:
.   -pc_asm_overlap <ovl> - Sets overlap

    Notes:
    By default the ASM preconditioner uses 1 block per processor.  To use
    multiple blocks per processor, see PCASMSetTotalSubdomains() and
    PCASMSetLocalSubdomains() (and the option -pc_asm_blocks <blks>).

    The overlap defaults to 1, so if one desires that no additional
    overlap be computed beyond what may have been set with a call to
    PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(), then ovl
    must be set to be 0.  In particular, if one does not explicitly set
    the subdomains in an application code, then all overlap would be computed
    internally by PETSc, and using an overlap of 0 would result in an ASM
    variant that is equivalent to the block Jacobi preconditioner.

    Note that one can define initial index sets with any overlap via
    PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(); the routine
    PCASMSetOverlap() merely allows PETSc to extend that overlap further
    if desired.
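
    Example (a sketch; with no user-set subdomains, overlap 0 reduces ASM to block Jacobi):
.vb
      PCSetType(pc,PCASM);
      PCASMSetOverlap(pc,0);
.ve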

    Level: intermediate

.keywords: PC, ASM, set, overlap

.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
@*/
PetscErrorCode  PCASMSetOverlap(PC pc,PetscInt ovl)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveInt(pc,ovl,2);
  ierr = PetscTryMethod(pc,"PCASMSetOverlap_C",(PC,PetscInt),(pc,ovl));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMSetType"
/*@
    PCASMSetType - Sets the type of restriction and interpolation used
    for local problems in the additive Schwarz method.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   type - variant of ASM, one of
.vb
      PC_ASM_BASIC       - full interpolation and restriction
      PC_ASM_RESTRICT    - full restriction, local processor interpolation
      PC_ASM_INTERPOLATE - full interpolation, local processor restriction
      PC_ASM_NONE        - local processor restriction and interpolation
.ve

    Options Database Key:
.   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type

    Level: intermediate

.keywords: PC, ASM, set, type

.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D()
@*/
PetscErrorCode  PCASMSetType(PC pc,PCASMType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveEnum(pc,type,2);
  ierr = PetscTryMethod(pc,"PCASMSetType_C",(PC,PCASMType),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMGetType"
/*@
    PCASMGetType - Gets the type of restriction and interpolation used
    for local problems in the additive Schwarz method.

    Logically Collective on PC

    Input Parameter:
.   pc  - the preconditioner context

    Output Parameter:
.   type - variant of ASM, one of

.vb
      PC_ASM_BASIC       - full interpolation and restriction
      PC_ASM_RESTRICT    - full restriction, local processor interpolation
      PC_ASM_INTERPOLATE - full interpolation, local processor restriction
      PC_ASM_NONE        - local processor restriction and interpolation
.ve

    Options Database Key:
.   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type

    Level: intermediate

.keywords: PC, ASM, get, type

.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D()
@*/
PetscErrorCode  PCASMGetType(PC pc,PCASMType *type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscUseMethod(pc,"PCASMGetType_C",(PC,PCASMType*),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMSetLocalType"
/*@
  PCASMSetLocalType - Sets the type of composition used for local problems in the additive Schwarz method.

  Logically Collective on PC

  Input Parameters:
+ pc  - the preconditioner context
- type - type of composition, one of
.vb
  PC_COMPOSITE_ADDITIVE       - local additive combination
  PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
.ve

  Options Database Key:
. -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type

  Level: intermediate

.seealso: PCASMSetType(), PCASMGetType(), PCASMGetLocalType(), PCASMCreate()
@*/
PetscErrorCode PCASMSetLocalType(PC pc, PCCompositeType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(pc, type, 2);
  ierr = PetscTryMethod(pc, "PCASMSetLocalType_C", (PC, PCCompositeType), (pc, type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMGetLocalType"
/*@
  PCASMGetLocalType - Gets the type of composition used for local problems in the additive Schwarz method.

  Logically Collective on PC

  Input Parameter:
. pc  - the preconditioner context

  Output Parameter:
. type - type of composition, one of
.vb
  PC_COMPOSITE_ADDITIVE       - local additive combination
  PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
.ve

  Options Database Key:
. -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type

  Level: intermediate

.seealso: PCASMSetType(), PCASMGetType(), PCASMSetLocalType(), PCASMCreate()
@*/
PetscErrorCode PCASMGetLocalType(PC pc, PCCompositeType *type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidPointer(type, 2);
  ierr = PetscUseMethod(pc, "PCASMGetLocalType_C", (PC, PCCompositeType *), (pc, type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMSetSortIndices"
/*@
    PCASMSetSortIndices - Determines whether subdomain indices are sorted.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   doSort - sort the subdomain indices

    Level: intermediate

.keywords: PC, ASM, set, type

.seealso: PCASMSetLocalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D()
@*/
PetscErrorCode  PCASMSetSortIndices(PC pc,PetscBool doSort)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveBool(pc,doSort,2);
  ierr = PetscTryMethod(pc,"PCASMSetSortIndices_C",(PC,PetscBool),(pc,doSort));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMGetSubKSP"
/*@C
   PCASMGetSubKSP - Gets the local KSP contexts for all blocks on
   this processor.

   Collective on PC iff first_local is requested

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  n_local - the number of blocks on this processor or NULL
.  first_local - the global number of the first block on this processor or NULL,
                 all processors must request or all must pass NULL
-  ksp - the array of KSP contexts

   Note:
   After PCASMGetSubKSP() the array of KSP contexts is not to be freed.

   Currently for some matrix implementations only 1 block per processor
   is supported.

   You must call KSPSetUp() before calling PCASMGetSubKSP().

   Fortran note:
   The output argument 'ksp' must be an array of sufficient length or NULL_OBJECT. The latter can be used to learn the necessary length.
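
   Example (a sketch, customizing the solver on each block after setup; ksp is the outer solver):
.vb
      KSP      *subksp;
      PetscInt i,nlocal;
      KSPSetUp(ksp);                           /* triggers PCSetUp() on the ASM PC */
      PCASMGetSubKSP(pc,&nlocal,NULL,&subksp);
      for (i=0; i<nlocal; i++) {
        PC subpc;
        KSPGetPC(subksp[i],&subpc);
        PCSetType(subpc,PCLU);                 /* e.g., a direct solve on each block */
      }
.ve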

   Level: advanced

.keywords: PC, ASM, additive Schwarz, get, sub, KSP, context

.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMSetOverlap(),
          PCASMCreateSubdomains2D()
@*/
PetscErrorCode  PCASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscUseMethod(pc,"PCASMGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1232 
1233 /* -------------------------------------------------------------------------------------*/
1234 /*MC
1235    PCASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
1236            its own KSP object.
1237 
1238    Options Database Keys:
1239 +  -pc_asm_blocks <blks> - Sets total blocks
1240 .  -pc_asm_overlap <ovl> - Sets overlap
1241 -  -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
1242 
1243      IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
1244       will get a different convergence rate due to the default option of -pc_asm_type restrict. Use
1245       -pc_asm_type basic to use the standard ASM.
1246 
1247    Notes: Each processor can have one or more blocks, but a block cannot be shared by more
1248      than one processor. Defaults to one block per processor.
1249 
     To set options on the solvers for each block, append -sub_ to all the KSP and PC
        options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly

     To set options on the solvers separately for each block, call PCASMGetSubKSP()
         and set the options directly on the resulting KSP object (you can access its PC
         with KSPGetPC()), as sketched in the example below.
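
     For example, the following sketch (error checking omitted; A is assumed to be an already-assembled Mat,
     and b and x conforming Vecs) builds a KSP that applies ASM with overlap 2:
.vb
      KSP ksp;
      PC  pc;
      KSPCreate(PETSC_COMM_WORLD,&ksp);
      KSPSetOperators(ksp,A,A);
      KSPGetPC(ksp,&pc);
      PCSetType(pc,PCASM);
      PCASMSetOverlap(pc,2);     /* equivalent to -pc_asm_overlap 2 */
      KSPSetFromOptions(ksp);    /* picks up -sub_pc_type ilu, -sub_pc_factor_levels 1, etc. */
      KSPSolve(ksp,b,x);
.ve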
1256 
1257 
1258    Level: beginner
1259 
1260    Concepts: additive Schwarz method
1261 
1262     References:
+   1. - M Dryja and OB Widlund, An additive variant of the Schwarz alternating method for the case of many subregions,
     Courant Institute, New York University Technical report
-   2. - Barry Smith, Petter Bjorstad, and William Gropp, Domain Decompositions: Parallel Multilevel Methods for Elliptic Partial Differential Equations,
1266     Cambridge University Press.
1267 
1268 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
1269            PCBJACOBI, PCASMGetSubKSP(), PCASMSetLocalSubdomains(),
1270            PCASMSetTotalSubdomains(), PCSetModifySubmatrices(), PCASMSetOverlap(), PCASMSetType()
1271 
1272 M*/
1273 
1274 #undef __FUNCT__
1275 #define __FUNCT__ "PCCreate_ASM"
1276 PETSC_EXTERN PetscErrorCode PCCreate_ASM(PC pc)
1277 {
1278   PetscErrorCode ierr;
1279   PC_ASM         *osm;
1280 
1281   PetscFunctionBegin;
1282   ierr = PetscNewLog(pc,&osm);CHKERRQ(ierr);
1283 
1284   osm->n                 = PETSC_DECIDE;
1285   osm->n_local           = 0;
1286   osm->n_local_true      = PETSC_DECIDE;
1287   osm->overlap           = 1;
1288   osm->ksp               = 0;
1289   osm->restriction       = 0;
1290   osm->localization      = 0;
1291   osm->prolongation      = 0;
1292   osm->x                 = 0;
1293   osm->y                 = 0;
1294   osm->y_local           = 0;
1295   osm->is                = 0;
1296   osm->is_local          = 0;
1297   osm->mat               = 0;
1298   osm->pmat              = 0;
1299   osm->type              = PC_ASM_RESTRICT;
1300   osm->loctype           = PC_COMPOSITE_ADDITIVE;
1301   osm->same_local_solves = PETSC_TRUE;
1302   osm->sort_indices      = PETSC_TRUE;
1303   osm->dm_subdomains     = PETSC_FALSE;
1304 
1305   pc->data                 = (void*)osm;
1306   pc->ops->apply           = PCApply_ASM;
1307   pc->ops->applytranspose  = PCApplyTranspose_ASM;
1308   pc->ops->setup           = PCSetUp_ASM;
1309   pc->ops->reset           = PCReset_ASM;
1310   pc->ops->destroy         = PCDestroy_ASM;
1311   pc->ops->setfromoptions  = PCSetFromOptions_ASM;
1312   pc->ops->setuponblocks   = PCSetUpOnBlocks_ASM;
1313   pc->ops->view            = PCView_ASM;
1314   pc->ops->applyrichardson = 0;
1315 
1316   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalSubdomains_C",PCASMSetLocalSubdomains_ASM);CHKERRQ(ierr);
1317   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetTotalSubdomains_C",PCASMSetTotalSubdomains_ASM);CHKERRQ(ierr);
1318   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetOverlap_C",PCASMSetOverlap_ASM);CHKERRQ(ierr);
1319   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetType_C",PCASMSetType_ASM);CHKERRQ(ierr);
1320   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetType_C",PCASMGetType_ASM);CHKERRQ(ierr);
1321   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalType_C",PCASMSetLocalType_ASM);CHKERRQ(ierr);
1322   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetLocalType_C",PCASMGetLocalType_ASM);CHKERRQ(ierr);
1323   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSortIndices_C",PCASMSetSortIndices_ASM);CHKERRQ(ierr);
1324   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubKSP_C",PCASMGetSubKSP_ASM);CHKERRQ(ierr);
1325   PetscFunctionReturn(0);
1326 }
1327 
1328 #undef __FUNCT__
1329 #define __FUNCT__ "PCASMCreateSubdomains"
1330 /*@C
1331    PCASMCreateSubdomains - Creates the index sets for the overlapping Schwarz
   preconditioner for any problem on a general grid.
1333 
1334    Collective
1335 
1336    Input Parameters:
1337 +  A - The global matrix operator
1338 -  n - the number of local blocks
1339 
   Output Parameter:
1341 .  outis - the array of index sets defining the subdomains
1342 
1343    Level: advanced
1344 
   Note: this generates nonoverlapping subdomains; PCASM will generate the overlap
    from these if you use PCASMSetLocalSubdomains()

    In the Fortran version you must provide the array outis[] already allocated with length n.
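
    Example:
    A minimal sketch (pc is assumed to be a PC of type PCASM with operator A):
.vb
      IS       *is;
      PetscInt nlocal = 4;                        /* blocks per process; an assumed choice */
      ierr = PCASMCreateSubdomains(A,nlocal,&is);CHKERRQ(ierr);
      ierr = PCASMSetLocalSubdomains(pc,nlocal,is,NULL);CHKERRQ(ierr);
      ierr = PCASMDestroySubdomains(nlocal,is,NULL);CHKERRQ(ierr); /* the PC keeps its own reference */
.ve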
1349 
1350 .keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid
1351 
1352 .seealso: PCASMSetLocalSubdomains(), PCASMDestroySubdomains()
1353 @*/
1354 PetscErrorCode  PCASMCreateSubdomains(Mat A, PetscInt n, IS* outis[])
1355 {
1356   MatPartitioning mpart;
1357   const char      *prefix;
1358   PetscErrorCode  (*f)(Mat,Mat*);
1359   PetscMPIInt     size;
1360   PetscInt        i,j,rstart,rend,bs;
1361   PetscBool       isbaij = PETSC_FALSE,foundpart = PETSC_FALSE;
1362   Mat             Ad     = NULL, adj;
1363   IS              ispart,isnumb,*is;
1364   PetscErrorCode  ierr;
1365 
1366   PetscFunctionBegin;
1367   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
1368   PetscValidPointer(outis,3);
1369   if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of local blocks must be > 0, n = %D",n);
1370 
1371   /* Get prefix, row distribution, and block size */
1372   ierr = MatGetOptionsPrefix(A,&prefix);CHKERRQ(ierr);
1373   ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
1374   ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
1375   if (rstart/bs*bs != rstart || rend/bs*bs != rend) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"bad row distribution [%D,%D) for matrix block size %D",rstart,rend,bs);
1376 
1377   /* Get diagonal block from matrix if possible */
1378   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);
1379   ierr = PetscObjectQueryFunction((PetscObject)A,"MatGetDiagonalBlock_C",&f);CHKERRQ(ierr);
1380   if (f) {
1381     ierr = MatGetDiagonalBlock(A,&Ad);CHKERRQ(ierr);
1382   } else if (size == 1) {
1383     Ad = A;
1384   }
1385   if (Ad) {
1386     ierr = PetscObjectTypeCompare((PetscObject)Ad,MATSEQBAIJ,&isbaij);CHKERRQ(ierr);
1387     if (!isbaij) {ierr = PetscObjectTypeCompare((PetscObject)Ad,MATSEQSBAIJ,&isbaij);CHKERRQ(ierr);}
1388   }
1389   if (Ad && n > 1) {
1390     PetscBool match,done;
1391     /* Try to setup a good matrix partitioning if available */
1392     ierr = MatPartitioningCreate(PETSC_COMM_SELF,&mpart);CHKERRQ(ierr);
1393     ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
1394     ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr);
1395     ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGCURRENT,&match);CHKERRQ(ierr);
1396     if (!match) {
1397       ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGSQUARE,&match);CHKERRQ(ierr);
1398     }
1399     if (!match) { /* assume a "good" partitioner is available */
1400       PetscInt       na;
1401       const PetscInt *ia,*ja;
1402       ierr = MatGetRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
1403       if (done) {
1404         /* Build adjacency matrix by hand. Unfortunately a call to
1405            MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
1406            remove the block-aij structure and we cannot expect
1407            MatPartitioning to split vertices as we need */
1408         PetscInt       i,j,len,nnz,cnt,*iia=0,*jja=0;
1409         const PetscInt *row;
1410         nnz = 0;
1411         for (i=0; i<na; i++) { /* count number of nonzeros */
1412           len = ia[i+1] - ia[i];
1413           row = ja + ia[i];
1414           for (j=0; j<len; j++) {
1415             if (row[j] == i) { /* don't count diagonal */
1416               len--; break;
1417             }
1418           }
1419           nnz += len;
1420         }
1421         ierr   = PetscMalloc1(na+1,&iia);CHKERRQ(ierr);
1422         ierr   = PetscMalloc1(nnz,&jja);CHKERRQ(ierr);
1423         nnz    = 0;
1424         iia[0] = 0;
1425         for (i=0; i<na; i++) { /* fill adjacency */
1426           cnt = 0;
1427           len = ia[i+1] - ia[i];
1428           row = ja + ia[i];
1429           for (j=0; j<len; j++) {
1430             if (row[j] != i) { /* if not diagonal */
1431               jja[nnz+cnt++] = row[j];
1432             }
1433           }
1434           nnz     += cnt;
1435           iia[i+1] = nnz;
1436         }
1437         /* Partitioning of the adjacency matrix */
1438         ierr      = MatCreateMPIAdj(PETSC_COMM_SELF,na,na,iia,jja,NULL,&adj);CHKERRQ(ierr);
1439         ierr      = MatPartitioningSetAdjacency(mpart,adj);CHKERRQ(ierr);
1440         ierr      = MatPartitioningSetNParts(mpart,n);CHKERRQ(ierr);
1441         ierr      = MatPartitioningApply(mpart,&ispart);CHKERRQ(ierr);
1442         ierr      = ISPartitioningToNumbering(ispart,&isnumb);CHKERRQ(ierr);
1443         ierr      = MatDestroy(&adj);CHKERRQ(ierr);
1444         foundpart = PETSC_TRUE;
1445       }
1446       ierr = MatRestoreRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
1447     }
1448     ierr = MatPartitioningDestroy(&mpart);CHKERRQ(ierr);
1449   }
1450 
1451   ierr   = PetscMalloc1(n,&is);CHKERRQ(ierr);
1452   *outis = is;
1453 
1454   if (!foundpart) {
1455 
1456     /* Partitioning by contiguous chunks of rows */
1457 
1458     PetscInt mbs   = (rend-rstart)/bs;
1459     PetscInt start = rstart;
1460     for (i=0; i<n; i++) {
      PetscInt count = (mbs/n + ((mbs % n) > i)) * bs; /* the first mbs%n subdomains get one extra block row */
1462       ierr   = ISCreateStride(PETSC_COMM_SELF,count,start,1,&is[i]);CHKERRQ(ierr);
1463       start += count;
1464     }
1465 
1466   } else {
1467 
1468     /* Partitioning by adjacency of diagonal block  */
1469 
1470     const PetscInt *numbering;
1471     PetscInt       *count,nidx,*indices,*newidx,start=0;
1472     /* Get node count in each partition */
1473     ierr = PetscMalloc1(n,&count);CHKERRQ(ierr);
1474     ierr = ISPartitioningCount(ispart,n,count);CHKERRQ(ierr);
1475     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1476       for (i=0; i<n; i++) count[i] *= bs;
1477     }
1478     /* Build indices from node numbering */
1479     ierr = ISGetLocalSize(isnumb,&nidx);CHKERRQ(ierr);
1480     ierr = PetscMalloc1(nidx,&indices);CHKERRQ(ierr);
1481     for (i=0; i<nidx; i++) indices[i] = i; /* needs to be initialized */
1482     ierr = ISGetIndices(isnumb,&numbering);CHKERRQ(ierr);
1483     ierr = PetscSortIntWithPermutation(nidx,numbering,indices);CHKERRQ(ierr);
1484     ierr = ISRestoreIndices(isnumb,&numbering);CHKERRQ(ierr);
1485     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1486       ierr = PetscMalloc1(nidx*bs,&newidx);CHKERRQ(ierr);
1487       for (i=0; i<nidx; i++) {
1488         for (j=0; j<bs; j++) newidx[i*bs+j] = indices[i]*bs + j;
1489       }
1490       ierr    = PetscFree(indices);CHKERRQ(ierr);
1491       nidx   *= bs;
1492       indices = newidx;
1493     }
1494     /* Shift to get global indices */
1495     for (i=0; i<nidx; i++) indices[i] += rstart;
1496 
1497     /* Build the index sets for each block */
1498     for (i=0; i<n; i++) {
1499       ierr   = ISCreateGeneral(PETSC_COMM_SELF,count[i],&indices[start],PETSC_COPY_VALUES,&is[i]);CHKERRQ(ierr);
1500       ierr   = ISSort(is[i]);CHKERRQ(ierr);
1501       start += count[i];
1502     }
1503 
1504     ierr = PetscFree(count);CHKERRQ(ierr);
1505     ierr = PetscFree(indices);CHKERRQ(ierr);
1506     ierr = ISDestroy(&isnumb);CHKERRQ(ierr);
1507     ierr = ISDestroy(&ispart);CHKERRQ(ierr);
1508 
1509   }
1510   PetscFunctionReturn(0);
1511 }
1512 
1513 #undef __FUNCT__
1514 #define __FUNCT__ "PCASMDestroySubdomains"
1515 /*@C
1516    PCASMDestroySubdomains - Destroys the index sets created with
1517    PCASMCreateSubdomains(). Should be called after setting subdomains
1518    with PCASMSetLocalSubdomains().
1519 
1520    Collective
1521 
1522    Input Parameters:
1523 +  n - the number of index sets
1524 .  is - the array of index sets
1525 -  is_local - the array of local index sets, can be NULL
1526 
1527    Level: advanced
1528 
1529 .keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid
1530 
1531 .seealso: PCASMCreateSubdomains(), PCASMSetLocalSubdomains()
1532 @*/
1533 PetscErrorCode  PCASMDestroySubdomains(PetscInt n, IS is[], IS is_local[])
1534 {
1535   PetscInt       i;
1536   PetscErrorCode ierr;
1537 
1538   PetscFunctionBegin;
1539   if (n <= 0) PetscFunctionReturn(0);
1540   if (is) {
1541     PetscValidPointer(is,2);
1542     for (i=0; i<n; i++) { ierr = ISDestroy(&is[i]);CHKERRQ(ierr); }
1543     ierr = PetscFree(is);CHKERRQ(ierr);
1544   }
1545   if (is_local) {
1546     PetscValidPointer(is_local,3);
1547     for (i=0; i<n; i++) { ierr = ISDestroy(&is_local[i]);CHKERRQ(ierr); }
1548     ierr = PetscFree(is_local);CHKERRQ(ierr);
1549   }
1550   PetscFunctionReturn(0);
1551 }
1552 
1553 #undef __FUNCT__
1554 #define __FUNCT__ "PCASMCreateSubdomains2D"
1555 /*@
1556    PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
1557    preconditioner for a two-dimensional problem on a regular grid.
1558 
1559    Not Collective
1560 
1561    Input Parameters:
1562 +  m, n - the number of mesh points in the x and y directions
1563 .  M, N - the number of subdomains in the x and y directions
1564 .  dof - degrees of freedom per node
1565 -  overlap - overlap in mesh lines
1566 
1567    Output Parameters:
1568 +  Nsub - the number of subdomains created
1569 .  is - array of index sets defining overlapping (if overlap > 0) subdomains
1570 -  is_local - array of index sets defining non-overlapping subdomains
1571 
1572    Note:
   Presently PCASMCreateSubdomains2D() is valid only for sequential
1574    preconditioners.  More general related routines are
1575    PCASMSetTotalSubdomains() and PCASMSetLocalSubdomains().
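
   Example:
   A sketch for a 64 x 64 grid split into a 4 x 2 (x by y) array of subdomains with one grid line of overlap:
.vb
      PetscInt Nsub;
      IS       *is,*is_local;
      ierr = PCASMCreateSubdomains2D(64,64,4,2,1,1,&Nsub,&is,&is_local);CHKERRQ(ierr); /* Nsub == 8 */
      ierr = PCASMSetLocalSubdomains(pc,Nsub,is,is_local);CHKERRQ(ierr);               /* pc is an assumed PCASM */
.ve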
1576 
1577    Level: advanced
1578 
1579 .keywords: PC, ASM, additive Schwarz, create, subdomains, 2D, regular grid
1580 
1581 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
1582           PCASMSetOverlap()
1583 @*/
1584 PetscErrorCode  PCASMCreateSubdomains2D(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt dof,PetscInt overlap,PetscInt *Nsub,IS **is,IS **is_local)
1585 {
1586   PetscInt       i,j,height,width,ystart,xstart,yleft,yright,xleft,xright,loc_outer;
1587   PetscErrorCode ierr;
1588   PetscInt       nidx,*idx,loc,ii,jj,count;
1589 
1590   PetscFunctionBegin;
  if (dof != 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"dof must be 1");
1592 
1593   *Nsub     = N*M;
1594   ierr      = PetscMalloc1(*Nsub,is);CHKERRQ(ierr);
1595   ierr      = PetscMalloc1(*Nsub,is_local);CHKERRQ(ierr);
1596   ystart    = 0;
1597   loc_outer = 0;
1598   for (i=0; i<N; i++) {
1599     height = n/N + ((n % N) > i); /* height of subdomain */
1600     if (height < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many N subdomains for mesh dimension n");
1601     yleft  = ystart - overlap; if (yleft < 0) yleft = 0;
1602     yright = ystart + height + overlap; if (yright > n) yright = n;
1603     xstart = 0;
1604     for (j=0; j<M; j++) {
1605       width = m/M + ((m % M) > j); /* width of subdomain */
1606       if (width < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many M subdomains for mesh dimension m");
1607       xleft  = xstart - overlap; if (xleft < 0) xleft = 0;
1608       xright = xstart + width + overlap; if (xright > m) xright = m;
1609       nidx   = (xright - xleft)*(yright - yleft);
1610       ierr   = PetscMalloc1(nidx,&idx);CHKERRQ(ierr);
1611       loc    = 0;
1612       for (ii=yleft; ii<yright; ii++) {
1613         count = m*ii + xleft;
1614         for (jj=xleft; jj<xright; jj++) idx[loc++] = count++;
1615       }
1616       ierr = ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,PETSC_COPY_VALUES,(*is)+loc_outer);CHKERRQ(ierr);
1617       if (overlap == 0) {
        ierr = PetscObjectReference((PetscObject)(*is)[loc_outer]);CHKERRQ(ierr);
        (*is_local)[loc_outer] = (*is)[loc_outer];
1621       } else {
1622         for (loc=0,ii=ystart; ii<ystart+height; ii++) {
1623           for (jj=xstart; jj<xstart+width; jj++) {
1624             idx[loc++] = m*ii + jj;
1625           }
1626         }
1627         ierr = ISCreateGeneral(PETSC_COMM_SELF,loc,idx,PETSC_COPY_VALUES,*is_local+loc_outer);CHKERRQ(ierr);
1628       }
1629       ierr    = PetscFree(idx);CHKERRQ(ierr);
1630       xstart += width;
1631       loc_outer++;
1632     }
1633     ystart += height;
1634   }
1635   for (i=0; i<*Nsub; i++) { ierr = ISSort((*is)[i]);CHKERRQ(ierr); }
1636   PetscFunctionReturn(0);
1637 }
1638 
1639 #undef __FUNCT__
1640 #define __FUNCT__ "PCASMGetLocalSubdomains"
1641 /*@C
1642     PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
1643     only) for the additive Schwarz preconditioner.
1644 
1645     Not Collective
1646 
1647     Input Parameter:
1648 .   pc - the preconditioner context
1649 
1650     Output Parameters:
1651 +   n - the number of subdomains for this processor (default value = 1)
1652 .   is - the index sets that define the subdomains for this processor
1653 -   is_local - the index sets that define the local part of the subdomains for this processor (can be NULL)
1654 
1655 
1656     Notes:
1657     The IS numbering is in the parallel, global numbering of the vector.
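
    Example:
    A sketch (pc is assumed to be a set-up PC of type PCASM):
.vb
      PetscInt n;
      IS       *is,*is_local;
      ierr = PCASMGetLocalSubdomains(pc,&n,&is,&is_local);CHKERRQ(ierr);
      ierr = ISView(is[0],PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr); /* inspect the first overlapping subdomain */
.ve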
1658 
1659     Level: advanced
1660 
1661 .keywords: PC, ASM, set, local, subdomains, additive Schwarz
1662 
1663 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1664           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubmatrices()
1665 @*/
1666 PetscErrorCode  PCASMGetLocalSubdomains(PC pc,PetscInt *n,IS *is[],IS *is_local[])
1667 {
1668   PC_ASM         *osm = (PC_ASM*)pc->data;
1669   PetscErrorCode ierr;
1670   PetscBool      match;
1671 
1672   PetscFunctionBegin;
1673   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1674   PetscValidIntPointer(n,2);
1675   if (is) PetscValidPointer(is,3);
1676   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1677   if (!match) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"PC is not a PCASM");
1678   if (n) *n = osm->n_local_true;
1679   if (is) *is = osm->is;
1680   if (is_local) *is_local = osm->is_local;
1681   PetscFunctionReturn(0);
1682 }
1683 
1684 #undef __FUNCT__
1685 #define __FUNCT__ "PCASMGetLocalSubmatrices"
1686 /*@C
1687     PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1688     only) for the additive Schwarz preconditioner.
1689 
1690     Not Collective
1691 
1692     Input Parameter:
1693 .   pc - the preconditioner context
1694 
1695     Output Parameters:
1696 +   n - the number of matrices for this processor (default value = 1)
1697 -   mat - the matrices
1698 
1699 
1700     Level: advanced
1701 
    Notes: Call after PCSetUp() (or KSPSetUp()) but before PCApply() (or KSPSolve()) and before PCSetUpOnBlocks().

           Usually one would use PCSetModifySubmatrices() to change the submatrices when building the preconditioner.
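
    Example:
    A sketch (ksp is assumed to be a KSP whose PC pc has type PCASM):
.vb
      PetscInt n;
      Mat      *submat;
      ierr = KSPSetUp(ksp);CHKERRQ(ierr);
      ierr = PCASMGetLocalSubmatrices(pc,&n,&submat);CHKERRQ(ierr);
      ierr = MatView(submat[0],PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr); /* inspect the first local block */
.ve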
1705 
1706 .keywords: PC, ASM, set, local, subdomains, additive Schwarz, block Jacobi
1707 
1708 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1709           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains(), PCSetModifySubmatrices()
1710 @*/
1711 PetscErrorCode  PCASMGetLocalSubmatrices(PC pc,PetscInt *n,Mat *mat[])
1712 {
1713   PC_ASM         *osm;
1714   PetscErrorCode ierr;
1715   PetscBool      match;
1716 
1717   PetscFunctionBegin;
1718   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1719   PetscValidIntPointer(n,2);
1720   if (mat) PetscValidPointer(mat,3);
  if (!pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUp() or PCSetUp().");
1722   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1723   if (!match) {
1724     if (n) *n = 0;
1725     if (mat) *mat = NULL;
1726   } else {
1727     osm = (PC_ASM*)pc->data;
1728     if (n) *n = osm->n_local_true;
1729     if (mat) *mat = osm->pmat;
1730   }
1731   PetscFunctionReturn(0);
1732 }
1733 
1734 #undef __FUNCT__
1735 #define __FUNCT__ "PCASMSetDMSubdomains"
1736 /*@
    PCASMSetDMSubdomains - Indicates whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.

    Logically Collective

    Input Parameters:
1741 +   pc  - the preconditioner
1742 -   flg - boolean indicating whether to use subdomains defined by the DM
1743 
1744     Options Database Key:
.   -pc_asm_dm_subdomains <bool> - use subdomains defined by the DM when possible
1746 
1747     Level: intermediate
1748 
1749     Notes:
1750     PCASMSetTotalSubdomains() and PCASMSetOverlap() take precedence over PCASMSetDMSubdomains(),
1751     so setting either of the first two effectively turns the latter off.
1752 
1753 .keywords: PC, ASM, DM, set, subdomains, additive Schwarz
1754 
.seealso: PCASMGetDMSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap(),
1756           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1757 @*/
1758 PetscErrorCode  PCASMSetDMSubdomains(PC pc,PetscBool flg)
1759 {
1760   PC_ASM         *osm = (PC_ASM*)pc->data;
1761   PetscErrorCode ierr;
1762   PetscBool      match;
1763 
1764   PetscFunctionBegin;
1765   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1766   PetscValidLogicalCollectiveBool(pc,flg,2);
1767   if (pc->setupcalled) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Not for a setup PC.");
1768   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1769   if (match) {
1770     osm->dm_subdomains = flg;
1771   }
1772   PetscFunctionReturn(0);
1773 }
1774 
1775 #undef __FUNCT__
1776 #define __FUNCT__ "PCASMGetDMSubdomains"
1777 /*@
    PCASMGetDMSubdomains - Returns a flag indicating whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.

    Not Collective
1780 
1781     Input Parameter:
1782 .   pc  - the preconditioner
1783 
1784     Output Parameter:
1785 .   flg - boolean indicating whether to use subdomains defined by the DM
1786 
1787     Level: intermediate
1788 
1789 .keywords: PC, ASM, DM, set, subdomains, additive Schwarz
1790 
.seealso: PCASMSetDMSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap(),
1792           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1793 @*/
1794 PetscErrorCode  PCASMGetDMSubdomains(PC pc,PetscBool* flg)
1795 {
1796   PC_ASM         *osm = (PC_ASM*)pc->data;
1797   PetscErrorCode ierr;
1798   PetscBool      match;
1799 
1800   PetscFunctionBegin;
1801   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1802   PetscValidPointer(flg,2);
1803   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1804   if (match) {
1805     if (flg) *flg = osm->dm_subdomains;
1806   }
1807   PetscFunctionReturn(0);
1808 }
1809