/*
  This file defines an additive Schwarz preconditioner for any Mat implementation.

  Each processor may own any number of subdomains, but to simplify the
  VecScatter() setup we treat every processor as if it owned the same
  number of subdomains.

       n - total number of true subdomains on all processors
       n_local_true - actual number of subdomains on this processor
       n_local - maximum over all processors of n_local_true
*/
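
/*
   An illustrative example (not taken from any particular run): with 3 MPI
   processes owning 2, 1, and 3 subdomains respectively, n = 6 and n_local = 3
   on every process, while n_local_true is 2, 1, or 3 depending on the process.
*/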
#include <petsc/private/pcimpl.h>     /*I "petscpc.h" I*/
#include <petscdm.h>

typedef struct {
  PetscInt   n, n_local, n_local_true;
  PetscInt   overlap;             /* overlap requested by user */
  KSP        *ksp;                /* linear solvers for each block */
  VecScatter *restriction;        /* mapping from global to subregion */
  VecScatter *localization;       /* mapping from overlapping to non-overlapping subregion */
  VecScatter *prolongation;       /* mapping from subregion to global */
  Vec        *x,*y,*y_local;      /* work vectors */
  IS         *is;                 /* index set that defines each overlapping subdomain */
  IS         *is_local;           /* index set that defines each non-overlapping subdomain, may be NULL */
  Mat        *mat,*pmat;          /* mat is not currently used */
  PCASMType  type;                /* use reduced interpolation, restriction or both */
  PetscBool  type_set;            /* if user set this value (so won't change it for symmetric problems) */
  PetscBool  same_local_solves;   /* flag indicating whether all local solvers are the same */
  PetscBool  sort_indices;        /* flag to sort subdomain indices */
  PetscBool  dm_subdomains;       /* whether DM is allowed to define subdomains */
  PCCompositeType loctype;        /* the type of composition for local solves */
  /* For multiplicative solve */
  Mat       *lmats;               /* submatrices for overlapping multiplicative (process) subdomain */
  Vec        lx, ly;              /* work vectors */
  IS         lis;                 /* index set that defines each overlapping multiplicative (process) subdomain */
} PC_ASM;

#undef __FUNCT__
#define __FUNCT__ "PCView_ASM"
static PetscErrorCode PCView_ASM(PC pc,PetscViewer viewer)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  PetscInt       i,bsz;
  PetscBool      iascii,isstring;
  PetscViewer    sviewer;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    char overlaps[256] = "user-defined overlap",blocks[256] = "total subdomain blocks not yet set";
    if (osm->overlap >= 0) {ierr = PetscSNPrintf(overlaps,sizeof(overlaps),"amount of overlap = %D",osm->overlap);CHKERRQ(ierr);}
    if (osm->n > 0) {ierr = PetscSNPrintf(blocks,sizeof(blocks),"total subdomain blocks = %D",osm->n);CHKERRQ(ierr);}
    ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: %s, %s\n",blocks,overlaps);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: restriction/interpolation type - %s\n",PCASMTypes[osm->type]);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: use DM subdomains - %d\n",osm->dm_subdomains);CHKERRQ(ierr);
    if (osm->loctype != PC_COMPOSITE_ADDITIVE) {ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: local solve composition type - %s\n",PCCompositeTypes[osm->loctype]);CHKERRQ(ierr);}
    ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
    if (osm->same_local_solves) {
      if (osm->ksp) {
        ierr = PetscViewerASCIIPrintf(viewer,"  Local solve is same for all blocks, in the following KSP and PC objects:\n");CHKERRQ(ierr);
        ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
        if (!rank) {
          ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
          ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);
          ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
        }
        ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      }
    } else {
      ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"  [%d] number of local blocks = %D\n",(int)rank,osm->n_local_true);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"  Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
      ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      for (i=0; i<osm->n_local_true; i++) {
        ierr = ISGetLocalSize(osm->is[i],&bsz);CHKERRQ(ierr);
        ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"[%d] local block number %D, size = %D\n",(int)rank,i,bsz);CHKERRQ(ierr);
        ierr = KSPView(osm->ksp[i],sviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
      }
      ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    ierr = PetscViewerStringSPrintf(viewer," blocks=%D, overlap=%D, type=%s",osm->n,osm->overlap,PCASMTypes[osm->type]);CHKERRQ(ierr);
    ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
    if (osm->ksp) {ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);}
    ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMPrintSubdomains"
static PetscErrorCode PCASMPrintSubdomains(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  const char     *prefix;
  char           fname[PETSC_MAX_PATH_LEN+1];
  PetscViewer    viewer, sviewer;
  char           *s;
  PetscInt       i,j,nidx;
  const PetscInt *idx;
  PetscMPIInt    rank, size;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank);CHKERRQ(ierr);
  ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
  ierr = PetscOptionsGetString(NULL,prefix,"-pc_asm_print_subdomains",fname,PETSC_MAX_PATH_LEN,NULL);CHKERRQ(ierr);
  if (fname[0] == 0) { ierr = PetscStrcpy(fname,"stdout");CHKERRQ(ierr); }
  ierr = PetscViewerASCIIOpen(PetscObjectComm((PetscObject)pc),fname,&viewer);CHKERRQ(ierr);
  for (i=0; i<osm->n_local; i++) {
    if (i < osm->n_local_true) {
      ierr = ISGetLocalSize(osm->is[i],&nidx);CHKERRQ(ierr);
      ierr = ISGetIndices(osm->is[i],&idx);CHKERRQ(ierr);
      /* Print to a string viewer; allow up to 16 characters per index plus 512 for the header. */
      ierr = PetscMalloc1(16*(nidx+1)+512, &s);CHKERRQ(ierr);
      ierr = PetscViewerStringOpen(PETSC_COMM_SELF, s, 16*(nidx+1)+512, &sviewer);CHKERRQ(ierr);
      ierr = PetscViewerStringSPrintf(sviewer, "[%d:%d] Subdomain %D with overlap:\n", rank, size, i);CHKERRQ(ierr);
      for (j=0; j<nidx; j++) {
        ierr = PetscViewerStringSPrintf(sviewer,"%D ",idx[j]);CHKERRQ(ierr);
      }
      ierr = ISRestoreIndices(osm->is[i],&idx);CHKERRQ(ierr);
      ierr = PetscViewerStringSPrintf(sviewer,"\n");CHKERRQ(ierr);
      ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer, "%s", s);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
      ierr = PetscFree(s);CHKERRQ(ierr);
      if (osm->is_local) {
        /* Print to a string viewer; allow up to 16 characters per index plus 512 for the header. */
        ierr = PetscMalloc1(16*(nidx+1)+512, &s);CHKERRQ(ierr);
        ierr = PetscViewerStringOpen(PETSC_COMM_SELF, s, 16*(nidx+1)+512, &sviewer);CHKERRQ(ierr);
        ierr = PetscViewerStringSPrintf(sviewer, "[%d:%d] Subdomain %D without overlap:\n", rank, size, i);CHKERRQ(ierr);
        ierr = ISGetLocalSize(osm->is_local[i],&nidx);CHKERRQ(ierr);
        ierr = ISGetIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
        for (j=0; j<nidx; j++) {
          ierr = PetscViewerStringSPrintf(sviewer,"%D ",idx[j]);CHKERRQ(ierr);
        }
        ierr = ISRestoreIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
        ierr = PetscViewerStringSPrintf(sviewer,"\n");CHKERRQ(ierr);
        ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIISynchronizedPrintf(viewer, "%s", s);CHKERRQ(ierr);
        ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
        ierr = PetscFree(s);CHKERRQ(ierr);
      }
    } else {
      /* Participate in collective viewer calls. */
      ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
      /* Assume either all ranks have is_local or none do. */
      if (osm->is_local) {
        ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
        ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
      }
    }
  }
  ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
  ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetUp_ASM"
static PetscErrorCode PCSetUp_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscBool      symset,flg;
  PetscInt       i,m,m_local;
  MatReuse       scall = MAT_REUSE_MATRIX;
  IS             isl;
  KSP            ksp;
  PC             subpc;
  const char     *prefix,*pprefix;
  Vec            vec;
  DM             *domain_dm = NULL;

  PetscFunctionBegin;
  if (!pc->setupcalled) {

    if (!osm->type_set) {
      ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
      if (symset && flg) osm->type = PC_ASM_BASIC;
    }

    /* Note: if subdomains have been set either via PCASMSetTotalSubdomains() or via PCASMSetLocalSubdomains(), osm->n_local_true will not be PETSC_DECIDE */
    if (osm->n_local_true == PETSC_DECIDE) {
      /* no subdomains given */
      /* try pc->dm first, if allowed */
      if (osm->dm_subdomains && pc->dm) {
        PetscInt  num_domains, d;
        char      **domain_names;
        IS        *inner_domain_is, *outer_domain_is;
        ierr = DMCreateDomainDecomposition(pc->dm, &num_domains, &domain_names, &inner_domain_is, &outer_domain_is, &domain_dm);CHKERRQ(ierr);
        if (num_domains) {
          ierr = PCASMSetLocalSubdomains(pc, num_domains, outer_domain_is, inner_domain_is);CHKERRQ(ierr);
        }
        for (d = 0; d < num_domains; ++d) {
          if (domain_names)    {ierr = PetscFree(domain_names[d]);CHKERRQ(ierr);}
          if (inner_domain_is) {ierr = ISDestroy(&inner_domain_is[d]);CHKERRQ(ierr);}
          if (outer_domain_is) {ierr = ISDestroy(&outer_domain_is[d]);CHKERRQ(ierr);}
        }
        ierr = PetscFree(domain_names);CHKERRQ(ierr);
        ierr = PetscFree(inner_domain_is);CHKERRQ(ierr);
        ierr = PetscFree(outer_domain_is);CHKERRQ(ierr);
      }
      if (osm->n_local_true == PETSC_DECIDE) {
        /* still no subdomains; use one subdomain per processor */
        osm->n_local_true = 1;
      }
    }
    { /* determine the global and max number of subdomains */
      struct {PetscInt max,sum;} inwork,outwork;
      inwork.max   = osm->n_local_true;
      inwork.sum   = osm->n_local_true;
      ierr         = MPIU_Allreduce(&inwork,&outwork,1,MPIU_2INT,MPIU_MAXSUM_OP,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
      osm->n_local = outwork.max;
      osm->n       = outwork.sum;
    }
    if (!osm->is) { /* create the index sets */
      ierr = PCASMCreateSubdomains(pc->pmat,osm->n_local_true,&osm->is);CHKERRQ(ierr);
    }
    if (osm->n_local_true > 1 && !osm->is_local) {
      ierr = PetscMalloc1(osm->n_local_true,&osm->is_local);CHKERRQ(ierr);
      for (i=0; i<osm->n_local_true; i++) {
        if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
          ierr = ISDuplicate(osm->is[i],&osm->is_local[i]);CHKERRQ(ierr);
          ierr = ISCopy(osm->is[i],osm->is_local[i]);CHKERRQ(ierr);
        } else {
          ierr             = PetscObjectReference((PetscObject)osm->is[i]);CHKERRQ(ierr);
          osm->is_local[i] = osm->is[i];
        }
      }
    }
    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    flg  = PETSC_FALSE;
    ierr = PetscOptionsGetBool(NULL,prefix,"-pc_asm_print_subdomains",&flg,NULL);CHKERRQ(ierr);
    if (flg) { ierr = PCASMPrintSubdomains(pc);CHKERRQ(ierr); }

    if (osm->overlap > 0) {
      /* Extend the "overlapping" regions by a number of steps */
      ierr = MatIncreaseOverlap(pc->pmat,osm->n_local_true,osm->is,osm->overlap);CHKERRQ(ierr);
    }
    if (osm->sort_indices) {
      for (i=0; i<osm->n_local_true; i++) {
        ierr = ISSort(osm->is[i]);CHKERRQ(ierr);
        if (osm->is_local) {
          ierr = ISSort(osm->is_local[i]);CHKERRQ(ierr);
        }
      }
    }
    /* Create the local work vectors and scatter contexts */
    ierr = MatCreateVecs(pc->pmat,&vec,0);CHKERRQ(ierr);
    ierr = PetscMalloc1(osm->n_local,&osm->restriction);CHKERRQ(ierr);
    if (osm->is_local) {ierr = PetscMalloc1(osm->n_local,&osm->localization);CHKERRQ(ierr);}
    ierr = PetscMalloc1(osm->n_local,&osm->prolongation);CHKERRQ(ierr);
    ierr = PetscMalloc1(osm->n_local,&osm->x);CHKERRQ(ierr);
    ierr = PetscMalloc1(osm->n_local,&osm->y);CHKERRQ(ierr);
    ierr = PetscMalloc1(osm->n_local,&osm->y_local);CHKERRQ(ierr);
    for (i=0; i<osm->n_local_true; ++i) {
      ierr = ISGetLocalSize(osm->is[i],&m);CHKERRQ(ierr);
      ierr = VecCreateSeq(PETSC_COMM_SELF,m,&osm->x[i]);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,osm->is[i],osm->x[i],isl,&osm->restriction[i]);CHKERRQ(ierr);
      ierr = ISDestroy(&isl);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
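      /* Map the non-overlapping subdomain indices (is_local) into the local
         numbering of the overlapping subdomain (is) so that the solution on the
         overlapping block can later be restricted to the non-overlapping part */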
      if (osm->is_local) {
        ISLocalToGlobalMapping ltog;
        IS                     isll;
        const PetscInt         *idx_local;
        PetscInt               *idx,nout;

        ierr = ISLocalToGlobalMappingCreateIS(osm->is[i],&ltog);CHKERRQ(ierr);
        ierr = ISGetLocalSize(osm->is_local[i],&m_local);CHKERRQ(ierr);
        ierr = ISGetIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr);
        ierr = PetscMalloc1(m_local,&idx);CHKERRQ(ierr);
        ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m_local,idx_local,&nout,idx);CHKERRQ(ierr);
        ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
        if (nout != m_local) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"is_local not a subset of is");
        ierr = ISRestoreIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr);
        ierr = ISCreateGeneral(PETSC_COMM_SELF,m_local,idx,PETSC_OWN_POINTER,&isll);CHKERRQ(ierr);
        ierr = ISCreateStride(PETSC_COMM_SELF,m_local,0,1,&isl);CHKERRQ(ierr);
        ierr = VecCreateSeq(PETSC_COMM_SELF,m_local,&osm->y_local[i]);CHKERRQ(ierr);
        ierr = VecScatterCreate(osm->y[i],isll,osm->y_local[i],isl,&osm->localization[i]);CHKERRQ(ierr);
        ierr = ISDestroy(&isll);CHKERRQ(ierr);

        ierr = VecScatterCreate(vec,osm->is_local[i],osm->y_local[i],isl,&osm->prolongation[i]);CHKERRQ(ierr);
        ierr = ISDestroy(&isl);CHKERRQ(ierr);
      } else {
        ierr = VecGetLocalSize(vec,&m_local);CHKERRQ(ierr);

        osm->y_local[i] = osm->y[i];

        ierr = PetscObjectReference((PetscObject) osm->y[i]);CHKERRQ(ierr);

        osm->prolongation[i] = osm->restriction[i];

        ierr = PetscObjectReference((PetscObject) osm->restriction[i]);CHKERRQ(ierr);
      }
    }
    for (i=osm->n_local_true; i<osm->n_local; i++) {
      ierr = VecCreateSeq(PETSC_COMM_SELF,0,&osm->x[i]);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y_local[i]);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&isl);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,isl,osm->x[i],isl,&osm->restriction[i]);CHKERRQ(ierr);
      if (osm->is_local) {
        ierr = VecScatterCreate(osm->y[i],isl,osm->y_local[i],isl,&osm->localization[i]);CHKERRQ(ierr);
        ierr = VecScatterCreate(vec,isl,osm->x[i],isl,&osm->prolongation[i]);CHKERRQ(ierr);
      } else {
        osm->prolongation[i] = osm->restriction[i];
        ierr                 = PetscObjectReference((PetscObject) osm->restriction[i]);CHKERRQ(ierr);
      }
      ierr = ISDestroy(&isl);CHKERRQ(ierr);
    }
    ierr = VecDestroy(&vec);CHKERRQ(ierr);

    if (!osm->ksp) {
      /* Create the local solvers */
      ierr = PetscMalloc1(osm->n_local_true,&osm->ksp);CHKERRQ(ierr);
      if (domain_dm) {
        ierr = PetscInfo(pc,"Setting up ASM subproblems using the embedded DM\n");CHKERRQ(ierr);
      }
      for (i=0; i<osm->n_local_true; i++) {
        ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
        ierr = KSPSetErrorIfNotConverged(ksp,pc->erroriffailure);CHKERRQ(ierr);
        ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)ksp);CHKERRQ(ierr);
        ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
        ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
        ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
        ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
        ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
        ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
        if (domain_dm) {
          ierr = KSPSetDM(ksp, domain_dm[i]);CHKERRQ(ierr);
          ierr = KSPSetDMActive(ksp, PETSC_FALSE);CHKERRQ(ierr);
          ierr = DMDestroy(&domain_dm[i]);CHKERRQ(ierr);
        }
        osm->ksp[i] = ksp;
      }
      if (domain_dm) {
        ierr = PetscFree(domain_dm);CHKERRQ(ierr);
      }
    }
    if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
      PetscInt m;

      ierr = ISConcatenate(PETSC_COMM_SELF, osm->n_local_true, osm->is, &osm->lis);CHKERRQ(ierr);
      ierr = ISSortRemoveDups(osm->lis);CHKERRQ(ierr);
      ierr = ISGetLocalSize(osm->lis, &m);CHKERRQ(ierr);
      ierr = VecCreateSeq(PETSC_COMM_SELF, m, &osm->lx);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->lx, &osm->ly);CHKERRQ(ierr);
    }
    scall = MAT_INITIAL_MATRIX;
  } else {
    /*
       Destroy the blocks from the previous iteration
    */
    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
      scall = MAT_INITIAL_MATRIX;
    }
  }

  /*
     Extract out the submatrices
  */
  ierr = MatGetSubMatrices(pc->pmat,osm->n_local_true,osm->is,osm->is,scall,&osm->pmat);CHKERRQ(ierr);
  if (scall == MAT_INITIAL_MATRIX) {
    ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
    for (i=0; i<osm->n_local_true; i++) {
      ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)osm->pmat[i]);CHKERRQ(ierr);
      ierr = PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);CHKERRQ(ierr);
    }
  }
  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
    IS      *cis;
    PetscInt c;

    ierr = PetscMalloc1(osm->n_local_true, &cis);CHKERRQ(ierr);
    for (c = 0; c < osm->n_local_true; ++c) cis[c] = osm->lis;
    ierr = MatGetSubMatrices(pc->pmat, osm->n_local_true, osm->is, cis, scall, &osm->lmats);CHKERRQ(ierr);
    ierr = PetscFree(cis);CHKERRQ(ierr);
  }

  /* Return control to the user so that the submatrices can be modified (e.g., to apply
     different boundary conditions for the submatrices than for the global problem) */
  ierr = PCModifySubMatrices(pc,osm->n_local_true,osm->is,osm->is,osm->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);

  /*
     Loop over subdomains putting them into local ksp
  */
  for (i=0; i<osm->n_local_true; i++) {
    ierr = KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i]);CHKERRQ(ierr);
    if (!pc->setupcalled) {
      ierr = KSPSetFromOptions(osm->ksp[i]);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetUpOnBlocks_ASM"
static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc)
{
  PC_ASM             *osm = (PC_ASM*)pc->data;
  PetscErrorCode     ierr;
  PetscInt           i;
  KSPConvergedReason reason;

  PetscFunctionBegin;
  for (i=0; i<osm->n_local_true; i++) {
    ierr = KSPSetUp(osm->ksp[i]);CHKERRQ(ierr);
    ierr = KSPGetConvergedReason(osm->ksp[i],&reason);CHKERRQ(ierr);
    if (reason == KSP_DIVERGED_PCSETUP_FAILED) {
      pc->failedreason = PC_SUBPC_ERROR;
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApply_ASM"
static PetscErrorCode PCApply_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local = osm->n_local,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).
  */
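  /*
     In operator form (a sketch of the standard formulation), the additive branch
     below computes
         y = sum_i R_i^T A_i^{-1} R_i x
     where R_i restricts to the i-th overlapping subdomain and A_i = R_i A R_i^T;
     the RESTRICT/INTERPOLATE variants replace R_i or R_i^T by its local-only part.
  */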
  if (!(osm->type & PC_ASM_RESTRICT)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local_true; i++) {
      ierr = VecZeroEntries(osm->x[i]);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_INTERPOLATE)) reverse = SCATTER_REVERSE_LOCAL;

  switch (osm->loctype) {
  case PC_COMPOSITE_ADDITIVE:
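    /* Post all the restriction scatters first so that the communication can
       overlap with the local solves below */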
    for (i=0; i<n_local; i++) {
      ierr = VecScatterBegin(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    }
    ierr = VecZeroEntries(y);CHKERRQ(ierr);
    /* do the local solves */
    for (i=0; i<n_local_true; i++) {
      ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
      ierr = KSPSolve(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
      if (osm->localization) {
        ierr = VecScatterBegin(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
      }
      ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
    }
    /* handle the rest of the scatters that do not have local solves */
    for (i=n_local_true; i<n_local; i++) {
      ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
      ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
    }
    for (i=0; i<n_local; i++) {
      ierr = VecScatterEnd(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
    }
    break;
  case PC_COMPOSITE_MULTIPLICATIVE:
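    /* Multiplicative (Gauss-Seidel-like) sweep over the local subdomains: after each
       solve the correction is gathered into osm->ly, multiplied by the coupling rows
       of the next subdomain (osm->lmats[i+1]), and carried over in osm->lx so that it
       updates the next subdomain's right-hand side at the top of the loop */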
    ierr = VecZeroEntries(y);CHKERRQ(ierr);
    /* do the local solves */
    for (i = 0; i < n_local_true; ++i) {
      if (i > 0) {
        /* Update rhs */
        ierr = VecScatterBegin(osm->restriction[i], osm->lx, osm->x[i], INSERT_VALUES, forward);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->restriction[i], osm->lx, osm->x[i], INSERT_VALUES, forward);CHKERRQ(ierr);
      } else {
        ierr = VecZeroEntries(osm->x[i]);CHKERRQ(ierr);
      }
      ierr = VecScatterBegin(osm->restriction[i], x, osm->x[i], ADD_VALUES, forward);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->restriction[i], x, osm->x[i], ADD_VALUES, forward);CHKERRQ(ierr);
      ierr = KSPSolve(osm->ksp[i], osm->x[i], osm->y[i]);CHKERRQ(ierr);
      if (osm->localization) {
        ierr = VecScatterBegin(osm->localization[i], osm->y[i], osm->y_local[i], INSERT_VALUES, forward);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->localization[i], osm->y[i], osm->y_local[i], INSERT_VALUES, forward);CHKERRQ(ierr);
      }
      ierr = VecScatterBegin(osm->prolongation[i], osm->y_local[i], y, ADD_VALUES, reverse);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->prolongation[i], osm->y_local[i], y, ADD_VALUES, reverse);CHKERRQ(ierr);
      if (i < n_local_true-1) {
        ierr = VecSet(osm->ly, 0.0);CHKERRQ(ierr);
        ierr = VecScatterBegin(osm->prolongation[i], osm->y_local[i], osm->ly, INSERT_VALUES, reverse);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->prolongation[i], osm->y_local[i], osm->ly, INSERT_VALUES, reverse);CHKERRQ(ierr);
        ierr = VecScale(osm->ly, -1.0);CHKERRQ(ierr);
        ierr = MatMult(osm->lmats[i+1], osm->ly, osm->y[i+1]);CHKERRQ(ierr);
        ierr = VecScatterBegin(osm->restriction[i+1], osm->y[i+1], osm->lx, INSERT_VALUES, reverse);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->restriction[i+1], osm->y[i+1], osm->lx, INSERT_VALUES, reverse);CHKERRQ(ierr);
      }
    }
    /* handle the rest of the scatters that do not have local solves */
    for (i = n_local_true; i < n_local; ++i) {
      ierr = VecScatterBegin(osm->restriction[i], x, osm->x[i], INSERT_VALUES, forward);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->restriction[i], x, osm->x[i], INSERT_VALUES, forward);CHKERRQ(ierr);
      ierr = VecScatterBegin(osm->prolongation[i], osm->y_local[i], y, ADD_VALUES, reverse);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->prolongation[i], osm->y_local[i], y, ADD_VALUES, reverse);CHKERRQ(ierr);
    }
    break;
  default: SETERRQ1(PetscObjectComm((PetscObject) pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApplyTranspose_ASM"
static PetscErrorCode PCApplyTranspose_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local = osm->n_local,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).

     Note: the roles are reversed from PCApply_ASM() because we are applying
     the transpose of each of the three terms in the local solve.
  */
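  /* i.e., a sketch: y = sum_i R_i^T A_i^{-T} R_i x, so the scatter pattern is
     unchanged but the local-only restriction/interpolation roles are swapped */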
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local_true; i++) {
      ierr = VecZeroEntries(osm->x[i]);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_RESTRICT)) reverse = SCATTER_REVERSE_LOCAL;

  for (i=0; i<n_local; i++) {
    ierr = VecScatterBegin(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
  }
  ierr = VecZeroEntries(y);CHKERRQ(ierr);
  /* do the local solves */
  for (i=0; i<n_local_true; i++) {
    ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = KSPSolveTranspose(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
    if (osm->localization) {
      ierr = VecScatterBegin(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    }
    ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  /* handle the rest of the scatters that do not have local solves */
  for (i=n_local_true; i<n_local; i++) {
    ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  for (i=0; i<n_local; i++) {
    ierr = VecScatterEnd(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCReset_ASM"
static PetscErrorCode PCReset_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  if (osm->ksp) {
    for (i=0; i<osm->n_local_true; i++) {
      ierr = KSPReset(osm->ksp[i]);CHKERRQ(ierr);
    }
  }
  if (osm->pmat) {
    if (osm->n_local_true > 0) {
      ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
    }
  }
  if (osm->restriction) {
    for (i=0; i<osm->n_local; i++) {
      ierr = VecScatterDestroy(&osm->restriction[i]);CHKERRQ(ierr);
      if (osm->localization) {ierr = VecScatterDestroy(&osm->localization[i]);CHKERRQ(ierr);}
      ierr = VecScatterDestroy(&osm->prolongation[i]);CHKERRQ(ierr);
      ierr = VecDestroy(&osm->x[i]);CHKERRQ(ierr);
      ierr = VecDestroy(&osm->y[i]);CHKERRQ(ierr);
      ierr = VecDestroy(&osm->y_local[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree(osm->restriction);CHKERRQ(ierr);
    if (osm->localization) {ierr = PetscFree(osm->localization);CHKERRQ(ierr);}
    ierr = PetscFree(osm->prolongation);CHKERRQ(ierr);
    ierr = PetscFree(osm->x);CHKERRQ(ierr);
    ierr = PetscFree(osm->y);CHKERRQ(ierr);
    ierr = PetscFree(osm->y_local);CHKERRQ(ierr);
  }
  ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
    ierr = ISDestroy(&osm->lis);CHKERRQ(ierr);
    ierr = MatDestroyMatrices(osm->n_local_true, &osm->lmats);CHKERRQ(ierr);
    ierr = VecDestroy(&osm->lx);CHKERRQ(ierr);
    ierr = VecDestroy(&osm->ly);CHKERRQ(ierr);
  }

  osm->is       = 0;
  osm->is_local = 0;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCDestroy_ASM"
static PetscErrorCode PCDestroy_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  ierr = PCReset_ASM(pc);CHKERRQ(ierr);
  if (osm->ksp) {
    for (i=0; i<osm->n_local_true; i++) {
      ierr = KSPDestroy(&osm->ksp[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree(osm->ksp);CHKERRQ(ierr);
  }
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetFromOptions_ASM"
static PetscErrorCode PCSetFromOptions_ASM(PetscOptionItems *PetscOptionsObject,PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       blocks,ovl;
  PetscBool      symset,flg;
  PCASMType      asmtype;
  PCCompositeType loctype;

  PetscFunctionBegin;
  /* set the type to symmetric if matrix is symmetric */
  if (!osm->type_set && pc->pmat) {
    ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
    if (symset && flg) osm->type = PC_ASM_BASIC;
  }
  ierr = PetscOptionsHead(PetscOptionsObject,"Additive Schwarz options");CHKERRQ(ierr);
  ierr = PetscOptionsBool("-pc_asm_dm_subdomains","Use DMCreateDomainDecomposition() to define subdomains","PCASMSetDMSubdomains",osm->dm_subdomains,&osm->dm_subdomains,&flg);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-pc_asm_blocks","Number of subdomains","PCASMSetTotalSubdomains",osm->n,&blocks,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PCASMSetTotalSubdomains(pc,blocks,NULL,NULL);CHKERRQ(ierr);
    osm->dm_subdomains = PETSC_FALSE;
  }
  ierr = PetscOptionsInt("-pc_asm_overlap","Number of grid points overlap","PCASMSetOverlap",osm->overlap,&ovl,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PCASMSetOverlap(pc,ovl);CHKERRQ(ierr);
    osm->dm_subdomains = PETSC_FALSE;
  }
  flg  = PETSC_FALSE;
  ierr = PetscOptionsEnum("-pc_asm_type","Type of restriction/extension","PCASMSetType",PCASMTypes,(PetscEnum)osm->type,(PetscEnum*)&asmtype,&flg);CHKERRQ(ierr);
  if (flg) {ierr = PCASMSetType(pc,asmtype);CHKERRQ(ierr); }
  flg  = PETSC_FALSE;
  ierr = PetscOptionsEnum("-pc_asm_local_type","Type of local solver composition","PCASMSetLocalType",PCCompositeTypes,(PetscEnum)osm->loctype,(PetscEnum*)&loctype,&flg);CHKERRQ(ierr);
  if (flg) {ierr = PCASMSetLocalType(pc,loctype);CHKERRQ(ierr); }
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*------------------------------------------------------------------------------------*/

#undef __FUNCT__
#define __FUNCT__ "PCASMSetLocalSubdomains_ASM"
static PetscErrorCode  PCASMSetLocalSubdomains_ASM(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 1 or more blocks, n = %D",n);
  if (pc->setupcalled && (n != osm->n_local_true || is)) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetLocalSubdomains() should be called before calling PCSetUp().");

  if (!pc->setupcalled) {
    if (is) {
      for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is[i]);CHKERRQ(ierr);}
    }
    if (is_local) {
      for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is_local[i]);CHKERRQ(ierr);}
    }
    ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);

    osm->n_local_true = n;
    osm->is           = 0;
    osm->is_local     = 0;
    if (is) {
      ierr = PetscMalloc1(n,&osm->is);CHKERRQ(ierr);
      for (i=0; i<n; i++) osm->is[i] = is[i];
      /* Flag indicating that the user has set overlapping subdomains so PCASM should not increase their size. */
      osm->overlap = -1;
    }
    if (is_local) {
      ierr = PetscMalloc1(n,&osm->is_local);CHKERRQ(ierr);
      for (i=0; i<n; i++) osm->is_local[i] = is_local[i];
      if (!is) {
        ierr = PetscMalloc1(osm->n_local_true,&osm->is);CHKERRQ(ierr);
        for (i=0; i<osm->n_local_true; i++) {
          if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
            ierr = ISDuplicate(osm->is_local[i],&osm->is[i]);CHKERRQ(ierr);
            ierr = ISCopy(osm->is_local[i],osm->is[i]);CHKERRQ(ierr);
          } else {
            ierr = PetscObjectReference((PetscObject)osm->is_local[i]);CHKERRQ(ierr);
            osm->is[i] = osm->is_local[i];
          }
        }
      }
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMSetTotalSubdomains_ASM"
static PetscErrorCode  PCASMSetTotalSubdomains_ASM(PC pc,PetscInt N,IS *is,IS *is_local)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscMPIInt    rank,size;
  PetscInt       n;

  PetscFunctionBegin;
  if (N < 1) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Number of total blocks must be > 0, N = %D",N);
  if (is || is_local) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Use PCASMSetLocalSubdomains() to set specific index sets\n\tthey cannot be set globally yet.");

  /*
     Split the subdomains equally among all processors
  */
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);
  n    = N/size + ((N % size) > rank);
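  /* e.g. (illustrative): N = 5 blocks on size = 3 processes gives n = 2, 2, 1 on ranks 0, 1, 2 */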
  if (!n) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Process %d must have at least one block: total processors %d total blocks %D",(int)rank,(int)size,N);
  if (pc->setupcalled && n != osm->n_local_true) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"PCASMSetTotalSubdomains() should be called before PCSetUp().");
  if (!pc->setupcalled) {
    ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);

    osm->n_local_true = n;
    osm->is           = 0;
    osm->is_local     = 0;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMSetOverlap_ASM"
static PetscErrorCode  PCASMSetOverlap_ASM(PC pc,PetscInt ovl)
{
  PC_ASM *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  if (ovl < 0) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
  if (pc->setupcalled && ovl != osm->overlap) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetOverlap() should be called before PCSetUp().");
  if (!pc->setupcalled) osm->overlap = ovl;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMSetType_ASM"
static PetscErrorCode  PCASMSetType_ASM(PC pc,PCASMType type)
{
  PC_ASM *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  osm->type     = type;
  osm->type_set = PETSC_TRUE;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMGetType_ASM"
static PetscErrorCode  PCASMGetType_ASM(PC pc,PCASMType *type)
{
  PC_ASM *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  *type = osm->type;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMSetLocalType_ASM"
static PetscErrorCode  PCASMSetLocalType_ASM(PC pc, PCCompositeType type)
{
  PC_ASM *osm = (PC_ASM *) pc->data;

  PetscFunctionBegin;
  osm->loctype = type;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMGetLocalType_ASM"
static PetscErrorCode  PCASMGetLocalType_ASM(PC pc, PCCompositeType *type)
{
  PC_ASM *osm = (PC_ASM *) pc->data;

  PetscFunctionBegin;
  *type = osm->loctype;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMSetSortIndices_ASM"
static PetscErrorCode  PCASMSetSortIndices_ASM(PC pc,PetscBool doSort)
{
  PC_ASM *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  osm->sort_indices = doSort;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMGetSubKSP_ASM"
static PetscErrorCode  PCASMGetSubKSP_ASM(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (osm->n_local_true < 1) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ORDER,"Need to call PCSetUp() on PC (or KSPSetUp() on the outer KSP object) before calling here");

  if (n_local) *n_local = osm->n_local_true;
  if (first_local) {
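    /* MPI_Scan() computes an inclusive prefix sum; subtracting this process's own
       count leaves the number of blocks on lower ranks, i.e. the global index of
       the first local block */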
    ierr          = MPI_Scan(&osm->n_local_true,first_local,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
    *first_local -= osm->n_local_true;
  }
  if (ksp) {
    /* Assume that local solves are now different; not necessarily
       true though!  This flag is used only for PCView_ASM() */
    *ksp                   = osm->ksp;
    osm->same_local_solves = PETSC_FALSE;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMSetLocalSubdomains"
/*@C
    PCASMSetLocalSubdomains - Sets the local subdomains (for this processor only) for the additive Schwarz preconditioner.

    Collective on PC

    Input Parameters:
+   pc - the preconditioner context
.   n - the number of subdomains for this processor (default value = 1)
.   is - the index set that defines the subdomains for this processor
         (or NULL for PETSc to determine subdomains)
-   is_local - the index sets that define the local part of the subdomains for this processor
         (or NULL to use the default of 1 subdomain per process)

    Notes:
    The IS numbering is in the parallel, global numbering of the vector for both is and is_local

    By default the ASM preconditioner uses 1 block per processor.

    Use PCASMSetTotalSubdomains() to set the subdomains for all processors.
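
    A minimal usage sketch (m and first are illustrative placeholders for the
    subdomain size and starting global index):
.vb
      IS is;
      ISCreateStride(PETSC_COMM_SELF,m,first,1,&is);
      PCASMSetLocalSubdomains(pc,1,&is,NULL);
      ISDestroy(&is);   /* PCASM keeps its own reference */
.ve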

    Level: advanced

.keywords: PC, ASM, set, local, subdomains, additive Schwarz

.seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
@*/
PetscErrorCode  PCASMSetLocalSubdomains(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCASMSetLocalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,n,is,is_local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMSetTotalSubdomains"
/*@C
    PCASMSetTotalSubdomains - Sets the subdomains for all processors for the
    additive Schwarz preconditioner.  Either all or no processors in the
    PC communicator must call this routine, with the same index sets.

    Collective on PC

    Input Parameters:
+   pc - the preconditioner context
.   N  - the number of subdomains for all processors
.   is - the index sets that define the subdomains for all processors
         (or NULL to ask PETSc to come up with subdomains)
-   is_local - the index sets that define the local part of the subdomains for this processor
         (or NULL to use the default of 1 subdomain per process)

    Options Database Key:
    To set the total number of subdomain blocks rather than specify the
    index sets, use the option
.    -pc_asm_blocks <blks> - Sets total blocks

    Notes:
    Currently you cannot use this to set the actual subdomains with the argument is.

    By default the ASM preconditioner uses 1 block per processor.

    These index sets cannot be destroyed until after completion of the
    linear solves for which the ASM preconditioner is being used.

    Use PCASMSetLocalSubdomains() to set local subdomains.
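
    A minimal usage sketch (the block count 16 is illustrative; the index set
    arguments must currently be NULL):
.vb
      PCASMSetTotalSubdomains(pc,16,NULL,NULL);
.ve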

    The IS numbering is in the parallel, global numbering of the vector for both is and is_local

    Level: advanced

.keywords: PC, ASM, set, total, global, subdomains, additive Schwarz

.seealso: PCASMSetLocalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D()
@*/
PetscErrorCode  PCASMSetTotalSubdomains(PC pc,PetscInt N,IS is[],IS is_local[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCASMSetTotalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,N,is,is_local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMSetOverlap"
/*@
    PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
    additive Schwarz preconditioner.  Either all or no processors in the
    PC communicator must call this routine.  If MatIncreaseOverlap() is used,
    use the option -mat_increase_overlap when the problem size is large.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)

    Options Database Key:
.   -pc_asm_overlap <ovl> - Sets overlap

    Notes:
    By default the ASM preconditioner uses 1 block per processor.  To use
    multiple blocks per processor, see PCASMSetTotalSubdomains() and
    PCASMSetLocalSubdomains() (and the option -pc_asm_blocks <blks>).

    The overlap defaults to 1, so if one desires that no additional
    overlap be computed beyond what may have been set with a call to
    PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(), then ovl
    must be set to 0.  In particular, if one does not explicitly set
    the subdomains in an application code, then all overlap would be computed
    internally by PETSc, and using an overlap of 0 would result in an ASM
    variant that is equivalent to the block Jacobi preconditioner.

    Note that one can define initial index sets with any overlap via
    PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(); the routine
    PCASMSetOverlap() merely allows PETSc to extend that overlap further
    if desired.

    Level: intermediate

.keywords: PC, ASM, set, overlap

.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
@*/
PetscErrorCode  PCASMSetOverlap(PC pc,PetscInt ovl)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveInt(pc,ovl,2);
  ierr = PetscTryMethod(pc,"PCASMSetOverlap_C",(PC,PetscInt),(pc,ovl));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMSetType"
/*@
    PCASMSetType - Sets the type of restriction and interpolation used
    for local problems in the additive Schwarz method.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   type - variant of ASM, one of
.vb
      PC_ASM_BASIC       - full interpolation and restriction
      PC_ASM_RESTRICT    - full restriction, local processor interpolation
      PC_ASM_INTERPOLATE - full interpolation, local processor restriction
      PC_ASM_NONE        - local processor restriction and interpolation
.ve

    Options Database Key:
.   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type

    Level: intermediate

.keywords: PC, ASM, set, type

.seealso: PCASMSetLocalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D()
@*/
PetscErrorCode  PCASMSetType(PC pc,PCASMType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveEnum(pc,type,2);
  ierr = PetscTryMethod(pc,"PCASMSetType_C",(PC,PCASMType),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMGetType"
/*@
    PCASMGetType - Gets the type of restriction and interpolation used
    for local problems in the additive Schwarz method.

    Logically Collective on PC

    Input Parameter:
.   pc  - the preconditioner context

    Output Parameter:
.   type - variant of ASM, one of

.vb
      PC_ASM_BASIC       - full interpolation and restriction
      PC_ASM_RESTRICT    - full restriction, local processor interpolation
      PC_ASM_INTERPOLATE - full interpolation, local processor restriction
      PC_ASM_NONE        - local processor restriction and interpolation
.ve

    Options Database Key:
.   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type

    Level: intermediate

.keywords: PC, ASM, get, type

.seealso: PCASMSetLocalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D()
@*/
PetscErrorCode  PCASMGetType(PC pc,PCASMType *type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscUseMethod(pc,"PCASMGetType_C",(PC,PCASMType*),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMSetLocalType"
/*@
  PCASMSetLocalType - Sets the type of composition used for local problems in the additive Schwarz method.

  Logically Collective on PC

  Input Parameters:
+ pc  - the preconditioner context
- type - type of composition, one of
.vb
  PC_COMPOSITE_ADDITIVE       - local additive combination
  PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
.ve

  Options Database Key:
. -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type

  Level: intermediate

.seealso: PCASMSetType(), PCASMGetType(), PCASMGetLocalType(), PCASMCreate()
@*/
PetscErrorCode PCASMSetLocalType(PC pc, PCCompositeType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(pc, type, 2);
  ierr = PetscTryMethod(pc, "PCASMSetLocalType_C", (PC, PCCompositeType), (pc, type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMGetLocalType"
/*@
  PCASMGetLocalType - Gets the type of composition used for local problems in the additive Schwarz method.

  Logically Collective on PC

  Input Parameter:
. pc  - the preconditioner context

  Output Parameter:
. type - type of composition, one of
.vb
  PC_COMPOSITE_ADDITIVE       - local additive combination
  PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
.ve

  Options Database Key:
. -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type

  Level: intermediate

.seealso: PCASMSetType(), PCASMGetType(), PCASMSetLocalType(), PCASMCreate()
@*/
PetscErrorCode PCASMGetLocalType(PC pc, PCCompositeType *type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidPointer(type, 2);
  ierr = PetscUseMethod(pc, "PCASMGetLocalType_C", (PC, PCCompositeType *), (pc, type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMSetSortIndices"
/*@
    PCASMSetSortIndices - Determines whether subdomain indices are sorted.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   doSort - sort the subdomain indices

    Level: intermediate

.keywords: PC, ASM, set, type

.seealso: PCASMSetLocalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D()
@*/
PetscErrorCode  PCASMSetSortIndices(PC pc,PetscBool doSort)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveBool(pc,doSort,2);
  ierr = PetscTryMethod(pc,"PCASMSetSortIndices_C",(PC,PetscBool),(pc,doSort));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMGetSubKSP"
/*@C
   PCASMGetSubKSP - Gets the local KSP contexts for all blocks on
   this processor.

   Collective on PC iff first_local is requested

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  n_local - the number of blocks on this processor or NULL
.  first_local - the global number of the first block on this processor or NULL,
                 all processors must request or all must pass NULL
-  ksp - the array of KSP contexts

   Note:
   After PCASMGetSubKSP() the array of KSP contexts is not to be freed.

   Currently for some matrix implementations only 1 block per processor
   is supported.

   You must call KSPSetUp() before calling PCASMGetSubKSP().
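
   A typical usage sketch (ksp here is the outer solver that owns this PC, and
   PCILU for the blocks is just an example choice):
.vb
      KSP      *subksp;
      PetscInt i,nlocal;
      PC       subpc;

      KSPSetUp(ksp);
      PCASMGetSubKSP(pc,&nlocal,NULL,&subksp);
      for (i=0; i<nlocal; i++) {
        KSPGetPC(subksp[i],&subpc);
        PCSetType(subpc,PCILU);
      }
.ve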
1208 
1209    Fortran note:
1210    The output argument 'ksp' must be an array of sufficient length or NULL_OBJECT. The latter can be used to learn the necessary length.
1211 
1212    Level: advanced
1213 
1214 .keywords: PC, ASM, additive Schwarz, get, sub, KSP, context
1215 
1216 .seealso: PCASMSetTotalSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap(),
1217           PCASMCreateSubdomains2D(),
1218 @*/
1219 PetscErrorCode  PCASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
1220 {
1221   PetscErrorCode ierr;
1222 
1223   PetscFunctionBegin;
1224   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1225   ierr = PetscUseMethod(pc,"PCASMGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));CHKERRQ(ierr);
1226   PetscFunctionReturn(0);
1227 }
1228 
1229 /* -------------------------------------------------------------------------------------*/
1230 /*MC
1231    PCASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
1232            its own KSP object.
1233 
1234    Options Database Keys:
1235 +  -pc_asm_blocks <blks> - Sets total blocks
1236 .  -pc_asm_overlap <ovl> - Sets overlap
1237 -  -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
1238 
1239      IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
1240       will get a different convergence rate due to the default option of -pc_asm_type restrict. Use
1241       -pc_asm_type basic to use the standard ASM.
1242 
1243    Notes: Each processor can have one or more blocks, but a block cannot be shared by more
1244      than one processor. Defaults to one block per processor.
1245 
1246      To set options on the solvers for each block append -sub_ to all the KSP, and PC
1247         options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
1248 
1249      To set the options on the solvers separate for each block call PCASMGetSubKSP()
1250          and set the options directly on the resulting KSP object (you can access its PC
1251          with KSPGetPC())
1252 
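     For example, the following sketch (assuming ksp is the outer KSP and KSPSetUp() has been called; error checking omitted) gives every local block its own GMRES solver:
.vb
      PC       pc;
      PetscInt i, n_local, first;
      KSP      *subksp;

      KSPGetPC(ksp,&pc);
      PCASMGetSubKSP(pc,&n_local,&first,&subksp);
      for (i=0; i<n_local; i++) {
        KSPSetType(subksp[i],KSPGMRES);
      }
.ve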
1253 
1254    Level: beginner
1255 
1256    Concepts: additive Schwarz method
1257 
1258     References:
1259 +   1. - M Dryja, OB Widlund, An additive variant of the Schwarz alternating method for the case of many subregions,
1260      Courant Institute, New York University Technical report
1261 -   2. - Barry Smith, Petter Bjorstad, and William Gropp, Domain Decompositions: Parallel Multilevel Methods for Elliptic Partial Differential Equations,
1262     Cambridge University Press.
1263 
1264 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
1265            PCBJACOBI, PCASMGetSubKSP(), PCASMSetLocalSubdomains(),
1266            PCASMSetTotalSubdomains(), PCSetModifySubmatrices(), PCASMSetOverlap(), PCASMSetType()
1267 
1268 M*/
1269 
1270 #undef __FUNCT__
1271 #define __FUNCT__ "PCCreate_ASM"
1272 PETSC_EXTERN PetscErrorCode PCCreate_ASM(PC pc)
1273 {
1274   PetscErrorCode ierr;
1275   PC_ASM         *osm;
1276 
1277   PetscFunctionBegin;
1278   ierr = PetscNewLog(pc,&osm);CHKERRQ(ierr);
1279 
1280   osm->n                 = PETSC_DECIDE;
1281   osm->n_local           = 0;
1282   osm->n_local_true      = PETSC_DECIDE;
1283   osm->overlap           = 1;
1284   osm->ksp               = 0;
1285   osm->restriction       = 0;
1286   osm->localization      = 0;
1287   osm->prolongation      = 0;
1288   osm->x                 = 0;
1289   osm->y                 = 0;
1290   osm->y_local           = 0;
1291   osm->is                = 0;
1292   osm->is_local          = 0;
1293   osm->mat               = 0;
1294   osm->pmat              = 0;
1295   osm->type              = PC_ASM_RESTRICT;
1296   osm->loctype           = PC_COMPOSITE_ADDITIVE;
1297   osm->same_local_solves = PETSC_TRUE;
1298   osm->sort_indices      = PETSC_TRUE;
1299   osm->dm_subdomains     = PETSC_FALSE;
1300 
1301   pc->data                 = (void*)osm;
1302   pc->ops->apply           = PCApply_ASM;
1303   pc->ops->applytranspose  = PCApplyTranspose_ASM;
1304   pc->ops->setup           = PCSetUp_ASM;
1305   pc->ops->reset           = PCReset_ASM;
1306   pc->ops->destroy         = PCDestroy_ASM;
1307   pc->ops->setfromoptions  = PCSetFromOptions_ASM;
1308   pc->ops->setuponblocks   = PCSetUpOnBlocks_ASM;
1309   pc->ops->view            = PCView_ASM;
1310   pc->ops->applyrichardson = 0;
1311 
1312   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalSubdomains_C",PCASMSetLocalSubdomains_ASM);CHKERRQ(ierr);
1313   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetTotalSubdomains_C",PCASMSetTotalSubdomains_ASM);CHKERRQ(ierr);
1314   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetOverlap_C",PCASMSetOverlap_ASM);CHKERRQ(ierr);
1315   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetType_C",PCASMSetType_ASM);CHKERRQ(ierr);
1316   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetType_C",PCASMGetType_ASM);CHKERRQ(ierr);
1317   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalType_C",PCASMSetLocalType_ASM);CHKERRQ(ierr);
1318   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetLocalType_C",PCASMGetLocalType_ASM);CHKERRQ(ierr);
1319   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSortIndices_C",PCASMSetSortIndices_ASM);CHKERRQ(ierr);
1320   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubKSP_C",PCASMGetSubKSP_ASM);CHKERRQ(ierr);
1321   PetscFunctionReturn(0);
1322 }
1323 
1324 #undef __FUNCT__
1325 #define __FUNCT__ "PCASMCreateSubdomains"
1326 /*@C
1327    PCASMCreateSubdomains - Creates the index sets for the overlapping Schwarz
1328    preconditioner for any problem on a general grid.
1329 
1330    Collective
1331 
1332    Input Parameters:
1333 +  A - The global matrix operator
1334 -  n - the number of local blocks
1335 
1336    Output Parameter:
1337 .  outis - the array of index sets defining the subdomains
1338 
1339    Level: advanced
1340 
1341    Notes: This generates nonoverlapping subdomains; PCASM will generate the overlap
1342     from these if you pass them to PCASMSetLocalSubdomains(), as in the sketch below.
1343 
1344     In Fortran you must provide the array outis[], already allocated with length n.
1345 
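    A minimal sketch of the intended calling sequence (assuming A is the matrix from which the preconditioner is built and pc is a PCASM; error checking omitted):
.vb
      IS *is;

      PCASMCreateSubdomains(A,4,&is);          /* 4 nonoverlapping local blocks */
      PCASMSetLocalSubdomains(pc,4,is,NULL);   /* PCASM generates the overlap from these */
      PCASMDestroySubdomains(4,is,NULL);
.ve
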
1346 .keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid
1347 
1348 .seealso: PCASMSetLocalSubdomains(), PCASMDestroySubdomains()
1349 @*/
1350 PetscErrorCode  PCASMCreateSubdomains(Mat A, PetscInt n, IS* outis[])
1351 {
1352   MatPartitioning mpart;
1353   const char      *prefix;
1354   PetscErrorCode  (*f)(Mat,Mat*);
1355   PetscMPIInt     size;
1356   PetscInt        i,j,rstart,rend,bs;
1357   PetscBool       isbaij = PETSC_FALSE,foundpart = PETSC_FALSE;
1358   Mat             Ad     = NULL, adj;
1359   IS              ispart,isnumb,*is;
1360   PetscErrorCode  ierr;
1361 
1362   PetscFunctionBegin;
1363   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
1364   PetscValidPointer(outis,3);
1365   if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of local blocks must be > 0, n = %D",n);
1366 
1367   /* Get prefix, row distribution, and block size */
1368   ierr = MatGetOptionsPrefix(A,&prefix);CHKERRQ(ierr);
1369   ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
1370   ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
1371   if (rstart/bs*bs != rstart || rend/bs*bs != rend) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"bad row distribution [%D,%D) for matrix block size %D",rstart,rend,bs);
1372 
1373   /* Get diagonal block from matrix if possible */
1374   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);
1375   ierr = PetscObjectQueryFunction((PetscObject)A,"MatGetDiagonalBlock_C",&f);CHKERRQ(ierr);
1376   if (f) {
1377     ierr = MatGetDiagonalBlock(A,&Ad);CHKERRQ(ierr);
1378   } else if (size == 1) {
1379     Ad = A;
1380   }
1381   if (Ad) {
1382     ierr = PetscObjectTypeCompare((PetscObject)Ad,MATSEQBAIJ,&isbaij);CHKERRQ(ierr);
1383     if (!isbaij) {ierr = PetscObjectTypeCompare((PetscObject)Ad,MATSEQSBAIJ,&isbaij);CHKERRQ(ierr);}
1384   }
1385   if (Ad && n > 1) {
1386     PetscBool match,done;
1387     /* Try to setup a good matrix partitioning if available */
1388     ierr = MatPartitioningCreate(PETSC_COMM_SELF,&mpart);CHKERRQ(ierr);
1389     ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
1390     ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr);
1391     ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGCURRENT,&match);CHKERRQ(ierr);
1392     if (!match) {
1393       ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGSQUARE,&match);CHKERRQ(ierr);
1394     }
1395     if (!match) { /* assume a "good" partitioner is available */
1396       PetscInt       na;
1397       const PetscInt *ia,*ja;
1398       ierr = MatGetRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
1399       if (done) {
1400         /* Build adjacency matrix by hand. Unfortunately a call to
1401            MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
1402            remove the block-aij structure and we cannot expect
1403            MatPartitioning to split vertices as we need */
1404         PetscInt       i,j,len,nnz,cnt,*iia=0,*jja=0;
1405         const PetscInt *row;
1406         nnz = 0;
1407         for (i=0; i<na; i++) { /* count number of nonzeros */
1408           len = ia[i+1] - ia[i];
1409           row = ja + ia[i];
1410           for (j=0; j<len; j++) {
1411             if (row[j] == i) { /* don't count diagonal */
1412               len--; break;
1413             }
1414           }
1415           nnz += len;
1416         }
1417         ierr   = PetscMalloc1(na+1,&iia);CHKERRQ(ierr);
1418         ierr   = PetscMalloc1(nnz,&jja);CHKERRQ(ierr);
1419         nnz    = 0;
1420         iia[0] = 0;
1421         for (i=0; i<na; i++) { /* fill adjacency */
1422           cnt = 0;
1423           len = ia[i+1] - ia[i];
1424           row = ja + ia[i];
1425           for (j=0; j<len; j++) {
1426             if (row[j] != i) { /* if not diagonal */
1427               jja[nnz+cnt++] = row[j];
1428             }
1429           }
1430           nnz     += cnt;
1431           iia[i+1] = nnz;
1432         }
1433         /* Partitioning of the adjacency matrix */
1434         ierr      = MatCreateMPIAdj(PETSC_COMM_SELF,na,na,iia,jja,NULL,&adj);CHKERRQ(ierr);
1435         ierr      = MatPartitioningSetAdjacency(mpart,adj);CHKERRQ(ierr);
1436         ierr      = MatPartitioningSetNParts(mpart,n);CHKERRQ(ierr);
1437         ierr      = MatPartitioningApply(mpart,&ispart);CHKERRQ(ierr);
1438         ierr      = ISPartitioningToNumbering(ispart,&isnumb);CHKERRQ(ierr);
1439         ierr      = MatDestroy(&adj);CHKERRQ(ierr);
1440         foundpart = PETSC_TRUE;
1441       }
1442       ierr = MatRestoreRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
1443     }
1444     ierr = MatPartitioningDestroy(&mpart);CHKERRQ(ierr);
1445   }
1446 
1447   ierr   = PetscMalloc1(n,&is);CHKERRQ(ierr);
1448   *outis = is;
1449 
1450   if (!foundpart) {
1451 
1452     /* Partitioning by contiguous chunks of rows */
1453 
1454     PetscInt mbs   = (rend-rstart)/bs;
1455     PetscInt start = rstart;
1456     for (i=0; i<n; i++) {
1457       PetscInt count = (mbs/n + ((mbs % n) > i)) * bs;
1458       ierr   = ISCreateStride(PETSC_COMM_SELF,count,start,1,&is[i]);CHKERRQ(ierr);
1459       start += count;
1460     }
1461 
1462   } else {
1463 
1464     /* Partitioning by adjacency of diagonal block  */
1465 
1466     const PetscInt *numbering;
1467     PetscInt       *count,nidx,*indices,*newidx,start=0;
1468     /* Get node count in each partition */
1469     ierr = PetscMalloc1(n,&count);CHKERRQ(ierr);
1470     ierr = ISPartitioningCount(ispart,n,count);CHKERRQ(ierr);
1471     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1472       for (i=0; i<n; i++) count[i] *= bs;
1473     }
1474     /* Build indices from node numbering */
1475     ierr = ISGetLocalSize(isnumb,&nidx);CHKERRQ(ierr);
1476     ierr = PetscMalloc1(nidx,&indices);CHKERRQ(ierr);
1477     for (i=0; i<nidx; i++) indices[i] = i; /* needs to be initialized */
1478     ierr = ISGetIndices(isnumb,&numbering);CHKERRQ(ierr);
1479     ierr = PetscSortIntWithPermutation(nidx,numbering,indices);CHKERRQ(ierr);
1480     ierr = ISRestoreIndices(isnumb,&numbering);CHKERRQ(ierr);
1481     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1482       ierr = PetscMalloc1(nidx*bs,&newidx);CHKERRQ(ierr);
1483       for (i=0; i<nidx; i++) {
1484         for (j=0; j<bs; j++) newidx[i*bs+j] = indices[i]*bs + j;
1485       }
1486       ierr    = PetscFree(indices);CHKERRQ(ierr);
1487       nidx   *= bs;
1488       indices = newidx;
1489     }
1490     /* Shift to get global indices */
1491     for (i=0; i<nidx; i++) indices[i] += rstart;
1492 
1493     /* Build the index sets for each block */
1494     for (i=0; i<n; i++) {
1495       ierr   = ISCreateGeneral(PETSC_COMM_SELF,count[i],&indices[start],PETSC_COPY_VALUES,&is[i]);CHKERRQ(ierr);
1496       ierr   = ISSort(is[i]);CHKERRQ(ierr);
1497       start += count[i];
1498     }
1499 
1500     ierr = PetscFree(count);CHKERRQ(ierr);
1501     ierr = PetscFree(indices);CHKERRQ(ierr);
1502     ierr = ISDestroy(&isnumb);CHKERRQ(ierr);
1503     ierr = ISDestroy(&ispart);CHKERRQ(ierr);
1504 
1505   }
1506   PetscFunctionReturn(0);
1507 }
1508 
1509 #undef __FUNCT__
1510 #define __FUNCT__ "PCASMDestroySubdomains"
1511 /*@C
1512    PCASMDestroySubdomains - Destroys the index sets created with
1513    PCASMCreateSubdomains(). Should be called after setting subdomains
1514    with PCASMSetLocalSubdomains().
1515 
1516    Collective
1517 
1518    Input Parameters:
1519 +  n - the number of index sets
1520 .  is - the array of index sets
1521 -  is_local - the array of local index sets, can be NULL
1522 
1523    Level: advanced
1524 
1525 .keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid
1526 
1527 .seealso: PCASMCreateSubdomains(), PCASMSetLocalSubdomains()
1528 @*/
1529 PetscErrorCode  PCASMDestroySubdomains(PetscInt n, IS is[], IS is_local[])
1530 {
1531   PetscInt       i;
1532   PetscErrorCode ierr;
1533 
1534   PetscFunctionBegin;
1535   if (n <= 0) PetscFunctionReturn(0);
1536   if (is) {
1537     PetscValidPointer(is,2);
1538     for (i=0; i<n; i++) { ierr = ISDestroy(&is[i]);CHKERRQ(ierr); }
1539     ierr = PetscFree(is);CHKERRQ(ierr);
1540   }
1541   if (is_local) {
1542     PetscValidPointer(is_local,3);
1543     for (i=0; i<n; i++) { ierr = ISDestroy(&is_local[i]);CHKERRQ(ierr); }
1544     ierr = PetscFree(is_local);CHKERRQ(ierr);
1545   }
1546   PetscFunctionReturn(0);
1547 }
1548 
1549 #undef __FUNCT__
1550 #define __FUNCT__ "PCASMCreateSubdomains2D"
1551 /*@
1552    PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
1553    preconditioner for a two-dimensional problem on a regular grid.
1554 
1555    Not Collective
1556 
1557    Input Parameters:
1558 +  m, n - the number of mesh points in the x and y directions
1559 .  M, N - the number of subdomains in the x and y directions
1560 .  dof - degrees of freedom per node
1561 -  overlap - overlap in mesh lines
1562 
1563    Output Parameters:
1564 +  Nsub - the number of subdomains created
1565 .  is - array of index sets defining overlapping (if overlap > 0) subdomains
1566 -  is_local - array of index sets defining non-overlapping subdomains
1567 
1568    Note:
1569    Presently PCASMCreateSubdomains2D() is valid only for sequential
1570    preconditioners.  More general related routines are
1571    PCASMSetTotalSubdomains() and PCASMSetLocalSubdomains().
1572 
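   A usage sketch for an 8 x 8 grid split into 2 x 2 subdomains with overlap 1 (assumptions: one process, dof = 1, pc is a PCASM; error checking omitted):
.vb
      PetscInt Nsub;
      IS       *is, *is_local;

      PCASMCreateSubdomains2D(8,8,2,2,1,1,&Nsub,&is,&is_local);
      PCASMSetLocalSubdomains(pc,Nsub,is,is_local);
      PCASMDestroySubdomains(Nsub,is,is_local);
.ve
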
1573    Level: advanced
1574 
1575 .keywords: PC, ASM, additive Schwarz, create, subdomains, 2D, regular grid
1576 
1577 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
1578           PCASMSetOverlap()
1579 @*/
1580 PetscErrorCode  PCASMCreateSubdomains2D(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt dof,PetscInt overlap,PetscInt *Nsub,IS **is,IS **is_local)
1581 {
1582   PetscInt       i,j,height,width,ystart,xstart,yleft,yright,xleft,xright,loc_outer;
1583   PetscErrorCode ierr;
1584   PetscInt       nidx,*idx,loc,ii,jj,count;
1585 
1586   PetscFunctionBegin;
1587   if (dof != 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Currently only supports one degree of freedom per node (dof = 1)");
1588 
1589   *Nsub     = N*M;
1590   ierr      = PetscMalloc1(*Nsub,is);CHKERRQ(ierr);
1591   ierr      = PetscMalloc1(*Nsub,is_local);CHKERRQ(ierr);
1592   ystart    = 0;
1593   loc_outer = 0;
1594   for (i=0; i<N; i++) {
1595     height = n/N + ((n % N) > i); /* height of subdomain */
1596     if (height < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many N subdomains for mesh dimension n");
1597     yleft  = ystart - overlap; if (yleft < 0) yleft = 0;
1598     yright = ystart + height + overlap; if (yright > n) yright = n;
1599     xstart = 0;
1600     for (j=0; j<M; j++) {
1601       width = m/M + ((m % M) > j); /* width of subdomain */
1602       if (width < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many M subdomains for mesh dimension m");
1603       xleft  = xstart - overlap; if (xleft < 0) xleft = 0;
1604       xright = xstart + width + overlap; if (xright > m) xright = m;
1605       nidx   = (xright - xleft)*(yright - yleft);
1606       ierr   = PetscMalloc1(nidx,&idx);CHKERRQ(ierr);
1607       loc    = 0;
1608       for (ii=yleft; ii<yright; ii++) {
1609         count = m*ii + xleft;
1610         for (jj=xleft; jj<xright; jj++) idx[loc++] = count++;
1611       }
1612       ierr = ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,PETSC_COPY_VALUES,(*is)+loc_outer);CHKERRQ(ierr);
1613       if (overlap == 0) {
1614         ierr = PetscObjectReference((PetscObject)(*is)[loc_outer]);CHKERRQ(ierr);
1615         (*is_local)[loc_outer] = (*is)[loc_outer];
1617       } else {
1618         for (loc=0,ii=ystart; ii<ystart+height; ii++) {
1619           for (jj=xstart; jj<xstart+width; jj++) {
1620             idx[loc++] = m*ii + jj;
1621           }
1622         }
1623         ierr = ISCreateGeneral(PETSC_COMM_SELF,loc,idx,PETSC_COPY_VALUES,*is_local+loc_outer);CHKERRQ(ierr);
1624       }
1625       ierr    = PetscFree(idx);CHKERRQ(ierr);
1626       xstart += width;
1627       loc_outer++;
1628     }
1629     ystart += height;
1630   }
1631   for (i=0; i<*Nsub; i++) { ierr = ISSort((*is)[i]);CHKERRQ(ierr); }
1632   PetscFunctionReturn(0);
1633 }
1634 
1635 #undef __FUNCT__
1636 #define __FUNCT__ "PCASMGetLocalSubdomains"
1637 /*@C
1638     PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
1639     only) for the additive Schwarz preconditioner.
1640 
1641     Not Collective
1642 
1643     Input Parameter:
1644 .   pc - the preconditioner context
1645 
1646     Output Parameters:
1647 +   n - the number of subdomains for this processor (default value = 1)
1648 .   is - the index sets that define the subdomains for this processor
1649 -   is_local - the index sets that define the local part of the subdomains for this processor (can be NULL)
1650 
1651 
1652     Notes:
1653     The IS numbering is in the parallel, global numbering of the vector.
1654 
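    A minimal sketch (assuming pc is a PCASM; error checking omitted):
.vb
      PetscInt n;
      IS       *is, *is_local;

      PCASMGetLocalSubdomains(pc,&n,&is,&is_local);   /* internal arrays; do not free or destroy */
.ve
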
1655     Level: advanced
1656 
1657 .keywords: PC, ASM, set, local, subdomains, additive Schwarz
1658 
1659 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1660           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubmatrices()
1661 @*/
1662 PetscErrorCode  PCASMGetLocalSubdomains(PC pc,PetscInt *n,IS *is[],IS *is_local[])
1663 {
1664   PC_ASM         *osm = (PC_ASM*)pc->data;
1665   PetscErrorCode ierr;
1666   PetscBool      match;
1667 
1668   PetscFunctionBegin;
1669   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1670   PetscValidIntPointer(n,2);
1671   if (is) PetscValidPointer(is,3);
1672   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1673   if (!match) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"PC is not a PCASM");
1674   if (n) *n = osm->n_local_true;
1675   if (is) *is = osm->is;
1676   if (is_local) *is_local = osm->is_local;
1677   PetscFunctionReturn(0);
1678 }
1679 
1680 #undef __FUNCT__
1681 #define __FUNCT__ "PCASMGetLocalSubmatrices"
1682 /*@C
1683     PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1684     only) for the additive Schwarz preconditioner.
1685 
1686     Not Collective
1687 
1688     Input Parameter:
1689 .   pc - the preconditioner context
1690 
1691     Output Parameters:
1692 +   n - the number of matrices for this processor (default value = 1)
1693 -   mat - the matrices
1694 
1695 
1696     Level: advanced
1697 
1698     Notes: Call after PCSetUp() (or KSPSetUp()) but before PCApply() (or KSPSolve()) and before PCSetUpOnBlocks().
1699 
1700            Usually one would use PCSetModifySubmatrices() to change the submatrices in building the preconditioner.
1701 
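    A minimal sketch (assuming ksp has been set up with a PCASM preconditioner; error checking omitted):
.vb
      PC       pc;
      PetscInt n;
      Mat      *mats;

      KSPSetUp(ksp);
      KSPGetPC(ksp,&pc);
      PCASMGetLocalSubmatrices(pc,&n,&mats);   /* mats belongs to the PC; do not destroy it */
.ve
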
1702 .keywords: PC, ASM, set, local, subdomains, additive Schwarz, block Jacobi
1703 
1704 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1705           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains(), PCSetModifySubmatrices()
1706 @*/
1707 PetscErrorCode  PCASMGetLocalSubmatrices(PC pc,PetscInt *n,Mat *mat[])
1708 {
1709   PC_ASM         *osm;
1710   PetscErrorCode ierr;
1711   PetscBool      match;
1712 
1713   PetscFunctionBegin;
1714   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1715   PetscValidIntPointer(n,2);
1716   if (mat) PetscValidPointer(mat,3);
1717   if (!pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUp() or PCSetUp().");
1718   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1719   if (!match) {
1720     if (n) *n = 0;
1721     if (mat) *mat = NULL;
1722   } else {
1723     osm = (PC_ASM*)pc->data;
1724     if (n) *n = osm->n_local_true;
1725     if (mat) *mat = osm->pmat;
1726   }
1727   PetscFunctionReturn(0);
1728 }
1729 
1730 #undef __FUNCT__
1731 #define __FUNCT__ "PCASMSetDMSubdomains"
1732 /*@
1733     PCASMSetDMSubdomains - Indicates whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.

1734     Logically Collective
1735 
1736     Input Parameters:
1737 +   pc  - the preconditioner
1738 -   flg - boolean indicating whether to use subdomains defined by the DM
1739 
1740     Options Database Key:
1741 .   -pc_asm_dm_subdomains <bool> - use subdomains defined by the DM
1742 
1743     Level: intermediate
1744 
1745     Notes:
1746     PCASMSetTotalSubdomains() and PCASMSetOverlap() take precedence over PCASMSetDMSubdomains(),
1747     so setting either of the first two effectively turns the latter off.
1748 
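    A minimal sketch (assuming pc has a DM attached via PCSetDM() and PCSetUp() has not yet been called; error checking omitted):
.vb
      PCSetType(pc,PCASM);
      PCASMSetDMSubdomains(pc,PETSC_TRUE);   /* must be called before PCSetUp() */
      PCSetUp(pc);
.ve
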
1749 .keywords: PC, ASM, DM, set, subdomains, additive Schwarz
1750 
1751 .seealso: PCASMGetDMSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap(),
1752           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1753 @*/
1754 PetscErrorCode  PCASMSetDMSubdomains(PC pc,PetscBool flg)
1755 {
1756   PC_ASM         *osm = (PC_ASM*)pc->data;
1757   PetscErrorCode ierr;
1758   PetscBool      match;
1759 
1760   PetscFunctionBegin;
1761   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1762   PetscValidLogicalCollectiveBool(pc,flg,2);
1763   if (pc->setupcalled) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Cannot be called after PCSetUp()");
1764   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1765   if (match) {
1766     osm->dm_subdomains = flg;
1767   }
1768   PetscFunctionReturn(0);
1769 }
1770 
1771 #undef __FUNCT__
1772 #define __FUNCT__ "PCASMGetDMSubdomains"
1773 /*@
1774     PCASMGetDMSubdomains - Returns flag indicating whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.

1775     Not Collective
1776 
1777     Input Parameter:
1778 .   pc  - the preconditioner
1779 
1780     Output Parameter:
1781 .   flg - boolean indicating whether to use subdomains defined by the DM
1782 
1783     Level: intermediate
1784 
1785 .keywords: PC, ASM, DM, get, subdomains, additive Schwarz
1786 
1787 .seealso: PCASMSetDMSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap(),
1788           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1789 @*/
1790 PetscErrorCode  PCASMGetDMSubdomains(PC pc,PetscBool* flg)
1791 {
1792   PC_ASM         *osm = (PC_ASM*)pc->data;
1793   PetscErrorCode ierr;
1794   PetscBool      match;
1795 
1796   PetscFunctionBegin;
1797   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1798   PetscValidPointer(flg,2);
1799   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1800   if (match) {
1801     if (flg) *flg = osm->dm_subdomains;
1802   }
1803   PetscFunctionReturn(0);
1804 }
1805