
/*
  This file defines an additive Schwarz preconditioner for any Mat implementation.

  Note that each processor may have any number of subdomains. But in order to
  deal easily with the VecScatter(), we treat each processor as if it has the
  same number of subdomains.

       n - total number of true subdomains on all processors
       n_local_true - actual number of subdomains on this processor
       n_local = maximum over all processors of n_local_true
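
       For example, with 3 processes owning 2, 1, and 3 subdomains respectively:
       n = 6, n_local_true = 2, 1, or 3 (depending on the process), and n_local = 3.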
*/
#include <petsc/private/pcimpl.h>     /*I "petscpc.h" I*/
#include <petscdm.h>

typedef struct {
  PetscInt   n, n_local, n_local_true;
  PetscInt   overlap;             /* overlap requested by user */
  KSP        *ksp;                /* linear solvers for each block */
  VecScatter *restriction;        /* mapping from global to subregion */
  VecScatter *localization;       /* mapping from overlapping to non-overlapping subregion */
  VecScatter *prolongation;       /* mapping from subregion to global */
  Vec        *x,*y,*y_local;      /* work vectors */
  IS         *is;                 /* index set that defines each overlapping subdomain */
  IS         *is_local;           /* index set that defines each non-overlapping subdomain, may be NULL */
  Mat        *mat,*pmat;          /* mat is not currently used */
  PCASMType  type;                /* use reduced interpolation, restriction or both */
  PetscBool  type_set;            /* if user set this value (so won't change it for symmetric problems) */
  PetscBool  same_local_solves;   /* flag indicating whether all local solvers are same */
  PetscBool  sort_indices;        /* flag to sort subdomain indices */
  PetscBool  dm_subdomains;       /* whether DM is allowed to define subdomains */
  PCCompositeType loctype;        /* the type of composition for local solves */
  /* For multiplicative solve */
  Mat       *lmat;                /* submatrix for overlapping multiplicative (process) subdomain */
  Vec        lx, ly;              /* work vectors */
  IS         lis;                 /* index set that defines each overlapping multiplicative (process) subdomain */
} PC_ASM;

#undef __FUNCT__
#define __FUNCT__ "PCView_ASM"
static PetscErrorCode PCView_ASM(PC pc,PetscViewer viewer)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  PetscInt       i,bsz;
  PetscBool      iascii,isstring;
  PetscViewer    sviewer;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    char overlaps[256] = "user-defined overlap",blocks[256] = "total subdomain blocks not yet set";
    if (osm->overlap >= 0) {ierr = PetscSNPrintf(overlaps,sizeof(overlaps),"amount of overlap = %D",osm->overlap);CHKERRQ(ierr);}
    if (osm->n > 0) {ierr = PetscSNPrintf(blocks,sizeof(blocks),"total subdomain blocks = %D",osm->n);CHKERRQ(ierr);}
    ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: %s, %s\n",blocks,overlaps);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: restriction/interpolation type - %s\n",PCASMTypes[osm->type]);CHKERRQ(ierr);
    if (osm->loctype != PC_COMPOSITE_ADDITIVE) {ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: local solve composition type - %s\n",PCCompositeTypes[osm->loctype]);CHKERRQ(ierr);}
    ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
    if (osm->same_local_solves) {
      if (osm->ksp) {
        ierr = PetscViewerASCIIPrintf(viewer,"  Local solve is same for all blocks, in the following KSP and PC objects:\n");CHKERRQ(ierr);
        ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
        if (!rank) {
          ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
          ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);
          ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
        }
        ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      }
    } else {
      ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"  [%d] number of local blocks = %D\n",(int)rank,osm->n_local_true);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"  Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
      ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      for (i=0; i<osm->n_local_true; i++) {
        ierr = ISGetLocalSize(osm->is[i],&bsz);CHKERRQ(ierr);
        ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"[%d] local block number %D, size = %D\n",(int)rank,i,bsz);CHKERRQ(ierr);
        ierr = KSPView(osm->ksp[i],sviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
      }
      ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    ierr = PetscViewerStringSPrintf(viewer," blocks=%D, overlap=%D, type=%s",osm->n,osm->overlap,PCASMTypes[osm->type]);CHKERRQ(ierr);
    ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
    if (osm->ksp) {ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);}
    ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMPrintSubdomains"
static PetscErrorCode PCASMPrintSubdomains(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  const char     *prefix;
  char           fname[PETSC_MAX_PATH_LEN+1];
  PetscViewer    viewer, sviewer;
  char           *s;
  PetscInt       i,j,nidx;
  const PetscInt *idx;
  PetscMPIInt    rank, size;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank);CHKERRQ(ierr);
  ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
  ierr = PetscOptionsGetString(NULL,prefix,"-pc_asm_print_subdomains",fname,PETSC_MAX_PATH_LEN,NULL);CHKERRQ(ierr);
  if (fname[0] == 0) { ierr = PetscStrcpy(fname,"stdout");CHKERRQ(ierr); }
  ierr = PetscViewerASCIIOpen(PetscObjectComm((PetscObject)pc),fname,&viewer);CHKERRQ(ierr);
  for (i=0; i<osm->n_local; i++) {
    if (i < osm->n_local_true) {
      ierr = ISGetLocalSize(osm->is[i],&nidx);CHKERRQ(ierr);
      ierr = ISGetIndices(osm->is[i],&idx);CHKERRQ(ierr);
      /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
      ierr = PetscMalloc1(16*(nidx+1)+512, &s);CHKERRQ(ierr);
      ierr = PetscViewerStringOpen(PETSC_COMM_SELF, s, 16*(nidx+1)+512, &sviewer);CHKERRQ(ierr);
      ierr = PetscViewerStringSPrintf(sviewer, "[%d:%d] Subdomain %D with overlap:\n", (int)rank, (int)size, i);CHKERRQ(ierr);
      for (j=0; j<nidx; j++) {
        ierr = PetscViewerStringSPrintf(sviewer,"%D ",idx[j]);CHKERRQ(ierr);
      }
      ierr = ISRestoreIndices(osm->is[i],&idx);CHKERRQ(ierr);
      ierr = PetscViewerStringSPrintf(sviewer,"\n");CHKERRQ(ierr);
      ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer, "%s", s);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
      ierr = PetscFree(s);CHKERRQ(ierr);
      if (osm->is_local) {
        /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
        ierr = PetscMalloc1(16*(nidx+1)+512, &s);CHKERRQ(ierr);
        ierr = PetscViewerStringOpen(PETSC_COMM_SELF, s, 16*(nidx+1)+512, &sviewer);CHKERRQ(ierr);
        ierr = PetscViewerStringSPrintf(sviewer, "[%d:%d] Subdomain %D without overlap:\n", (int)rank, (int)size, i);CHKERRQ(ierr);
        ierr = ISGetLocalSize(osm->is_local[i],&nidx);CHKERRQ(ierr);
        ierr = ISGetIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
        for (j=0; j<nidx; j++) {
          ierr = PetscViewerStringSPrintf(sviewer,"%D ",idx[j]);CHKERRQ(ierr);
        }
        ierr = ISRestoreIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
        ierr = PetscViewerStringSPrintf(sviewer,"\n");CHKERRQ(ierr);
        ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIISynchronizedPrintf(viewer, "%s", s);CHKERRQ(ierr);
        ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
        ierr = PetscFree(s);CHKERRQ(ierr);
      }
    } else {
      /* Participate in collective viewer calls. */
      ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
      /* Assume either all ranks have is_local or none do. */
      if (osm->is_local) {
        ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
        ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
      }
    }
  }
  ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
  ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetUp_ASM"
static PetscErrorCode PCSetUp_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscBool      symset,flg;
  PetscInt       i,m,m_local;
  MatReuse       scall = MAT_REUSE_MATRIX;
  IS             isl;
  KSP            ksp;
  PC             subpc;
  const char     *prefix,*pprefix;
  Vec            vec;
  DM             *domain_dm = NULL;

  PetscFunctionBegin;
  if (!pc->setupcalled) {

    if (!osm->type_set) {
      ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
      if (symset && flg) osm->type = PC_ASM_BASIC;
    }

    /* Note: if subdomains have been set either via PCASMSetTotalSubdomains() or via PCASMSetLocalSubdomains(), osm->n_local_true will not be PETSC_DECIDE */
    if (osm->n_local_true == PETSC_DECIDE) {
      /* no subdomains given */
      /* try pc->dm first, if allowed */
      if (osm->dm_subdomains && pc->dm) {
        PetscInt  num_domains, d;
        char      **domain_names;
        IS        *inner_domain_is, *outer_domain_is;
        ierr = DMCreateDomainDecomposition(pc->dm, &num_domains, &domain_names, &inner_domain_is, &outer_domain_is, &domain_dm);CHKERRQ(ierr);
        if (num_domains) {
          ierr = PCASMSetLocalSubdomains(pc, num_domains, outer_domain_is, inner_domain_is);CHKERRQ(ierr);
        }
        for (d = 0; d < num_domains; ++d) {
          if (domain_names)    {ierr = PetscFree(domain_names[d]);CHKERRQ(ierr);}
          if (inner_domain_is) {ierr = ISDestroy(&inner_domain_is[d]);CHKERRQ(ierr);}
          if (outer_domain_is) {ierr = ISDestroy(&outer_domain_is[d]);CHKERRQ(ierr);}
        }
        ierr = PetscFree(domain_names);CHKERRQ(ierr);
        ierr = PetscFree(inner_domain_is);CHKERRQ(ierr);
        ierr = PetscFree(outer_domain_is);CHKERRQ(ierr);
      }
      if (osm->n_local_true == PETSC_DECIDE) {
        /* still no subdomains; use one subdomain per processor */
        osm->n_local_true = 1;
      }
    }
    { /* determine the global (sum) and maximum per-process number of subdomains; PetscMaxSum_Op reduces the (max,sum) pairs in a single call */
      struct {PetscInt max,sum;} inwork,outwork;
      inwork.max   = osm->n_local_true;
      inwork.sum   = osm->n_local_true;
      ierr         = MPIU_Allreduce(&inwork,&outwork,1,MPIU_2INT,PetscMaxSum_Op,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
      osm->n_local = outwork.max;
      osm->n       = outwork.sum;
    }
    if (!osm->is) { /* create the index sets */
      ierr = PCASMCreateSubdomains(pc->pmat,osm->n_local_true,&osm->is);CHKERRQ(ierr);
    }
    if (osm->n_local_true > 1 && !osm->is_local) {
      ierr = PetscMalloc1(osm->n_local_true,&osm->is_local);CHKERRQ(ierr);
      for (i=0; i<osm->n_local_true; i++) {
        if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
          ierr = ISDuplicate(osm->is[i],&osm->is_local[i]);CHKERRQ(ierr);
          ierr = ISCopy(osm->is[i],osm->is_local[i]);CHKERRQ(ierr);
        } else {
          ierr             = PetscObjectReference((PetscObject)osm->is[i]);CHKERRQ(ierr);
          osm->is_local[i] = osm->is[i];
        }
      }
    }
    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    flg  = PETSC_FALSE;
    ierr = PetscOptionsGetBool(NULL,prefix,"-pc_asm_print_subdomains",&flg,NULL);CHKERRQ(ierr);
    if (flg) { ierr = PCASMPrintSubdomains(pc);CHKERRQ(ierr); }

    if (osm->overlap > 0) {
      /* Extend the "overlapping" regions by a number of steps */
      ierr = MatIncreaseOverlap(pc->pmat,osm->n_local_true,osm->is,osm->overlap);CHKERRQ(ierr);
    }
    if (osm->sort_indices) {
      for (i=0; i<osm->n_local_true; i++) {
        ierr = ISSort(osm->is[i]);CHKERRQ(ierr);
        if (osm->is_local) {
          ierr = ISSort(osm->is_local[i]);CHKERRQ(ierr);
        }
      }
    }
    /* Create the local work vectors and scatter contexts */
    ierr = MatCreateVecs(pc->pmat,&vec,0);CHKERRQ(ierr);
    ierr = PetscMalloc1(osm->n_local,&osm->restriction);CHKERRQ(ierr);
    if (osm->is_local) {ierr = PetscMalloc1(osm->n_local,&osm->localization);CHKERRQ(ierr);}
    ierr = PetscMalloc1(osm->n_local,&osm->prolongation);CHKERRQ(ierr);
    ierr = PetscMalloc1(osm->n_local,&osm->x);CHKERRQ(ierr);
    ierr = PetscMalloc1(osm->n_local,&osm->y);CHKERRQ(ierr);
    ierr = PetscMalloc1(osm->n_local,&osm->y_local);CHKERRQ(ierr);
    for (i=0; i<osm->n_local_true; ++i) {
      ierr = ISGetLocalSize(osm->is[i],&m);CHKERRQ(ierr);
      ierr = VecCreateSeq(PETSC_COMM_SELF,m,&osm->x[i]);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,osm->is[i],osm->x[i],isl,&osm->restriction[i]);CHKERRQ(ierr);
      ierr = ISDestroy(&isl);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
      if (osm->is_local) {
        ISLocalToGlobalMapping ltog;
        IS                     isll;
        const PetscInt         *idx_local;
        PetscInt               *idx,nout;

        ierr = ISLocalToGlobalMappingCreateIS(osm->is[i],&ltog);CHKERRQ(ierr);
        ierr = ISGetLocalSize(osm->is_local[i],&m_local);CHKERRQ(ierr);
        ierr = ISGetIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr);
        ierr = PetscMalloc1(m_local,&idx);CHKERRQ(ierr);
        ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m_local,idx_local,&nout,idx);CHKERRQ(ierr);
        ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
        if (nout != m_local) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"is_local not a subset of is");
        ierr = ISRestoreIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr);
        ierr = ISCreateGeneral(PETSC_COMM_SELF,m_local,idx,PETSC_OWN_POINTER,&isll);CHKERRQ(ierr);
        ierr = ISCreateStride(PETSC_COMM_SELF,m_local,0,1,&isl);CHKERRQ(ierr);
        ierr = VecCreateSeq(PETSC_COMM_SELF,m_local,&osm->y_local[i]);CHKERRQ(ierr);
        ierr = VecScatterCreate(osm->y[i],isll,osm->y_local[i],isl,&osm->localization[i]);CHKERRQ(ierr);
        ierr = ISDestroy(&isll);CHKERRQ(ierr);

        ierr = VecScatterCreate(vec,osm->is_local[i],osm->y_local[i],isl,&osm->prolongation[i]);CHKERRQ(ierr);
        ierr = ISDestroy(&isl);CHKERRQ(ierr);
      } else {
        ierr = VecGetLocalSize(vec,&m_local);CHKERRQ(ierr);

        osm->y_local[i] = osm->y[i];

        ierr = PetscObjectReference((PetscObject) osm->y[i]);CHKERRQ(ierr);

        osm->prolongation[i] = osm->restriction[i];

        ierr = PetscObjectReference((PetscObject) osm->restriction[i]);CHKERRQ(ierr);
      }
    }
    for (i=osm->n_local_true; i<osm->n_local; i++) {
      ierr = VecCreateSeq(PETSC_COMM_SELF,0,&osm->x[i]);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y_local[i]);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&isl);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,isl,osm->x[i],isl,&osm->restriction[i]);CHKERRQ(ierr);
      if (osm->is_local) {
        ierr = VecScatterCreate(osm->y[i],isl,osm->y_local[i],isl,&osm->localization[i]);CHKERRQ(ierr);
        ierr = VecScatterCreate(vec,isl,osm->x[i],isl,&osm->prolongation[i]);CHKERRQ(ierr);
      } else {
        osm->prolongation[i] = osm->restriction[i];
        ierr                 = PetscObjectReference((PetscObject) osm->restriction[i]);CHKERRQ(ierr);
      }
      ierr = ISDestroy(&isl);CHKERRQ(ierr);
    }
    ierr = VecDestroy(&vec);CHKERRQ(ierr);

    if (!osm->ksp) {
      /* Create the local solvers */
      ierr = PetscMalloc1(osm->n_local_true,&osm->ksp);CHKERRQ(ierr);
      if (domain_dm) {
        ierr = PetscInfo(pc,"Setting up ASM subproblems using the embedded DM\n");CHKERRQ(ierr);
      }
      for (i=0; i<osm->n_local_true; i++) {
        ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
        ierr = KSPSetErrorIfNotConverged(ksp,pc->erroriffailure);CHKERRQ(ierr);
        ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)ksp);CHKERRQ(ierr);
        ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
        ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
        ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
        ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
        ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
        ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
        if (domain_dm) {
          ierr = KSPSetDM(ksp, domain_dm[i]);CHKERRQ(ierr);
          ierr = KSPSetDMActive(ksp, PETSC_FALSE);CHKERRQ(ierr);
          ierr = DMDestroy(&domain_dm[i]);CHKERRQ(ierr);
        }
        osm->ksp[i] = ksp;
      }
      if (domain_dm) {
        ierr = PetscFree(domain_dm);CHKERRQ(ierr);
      }
    }
    if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
      PetscInt m;

      ierr = ISConcatenate(PETSC_COMM_SELF, osm->n_local_true, osm->is, &osm->lis);CHKERRQ(ierr);
      ierr = ISSortRemoveDups(osm->lis);CHKERRQ(ierr);
      ierr = ISGetLocalSize(osm->lis, &m);CHKERRQ(ierr);
      ierr = VecCreateSeq(PETSC_COMM_SELF, m, &osm->lx);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->lx, &osm->ly);CHKERRQ(ierr);
    }
    scall = MAT_INITIAL_MATRIX;
  } else {
    /*
       Destroy the blocks from the previous iteration
    */
    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
      scall = MAT_INITIAL_MATRIX;
    }
  }

  /*
     Extract out the submatrices
  */
  ierr = MatGetSubMatrices(pc->pmat,osm->n_local_true,osm->is,osm->is,scall,&osm->pmat);CHKERRQ(ierr);
  if (scall == MAT_INITIAL_MATRIX) {
    ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
    for (i=0; i<osm->n_local_true; i++) {
      ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)osm->pmat[i]);CHKERRQ(ierr);
      ierr = PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);CHKERRQ(ierr);
    }
  }
  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
    ierr = MatGetSubMatrices(pc->pmat, 1, &osm->lis, &osm->lis, scall, &osm->lmat);CHKERRQ(ierr);
  }

  /* Return control to the user so that the submatrices can be modified (e.g., to apply
     different boundary conditions for the submatrices than for the global problem) */
  ierr = PCModifySubMatrices(pc,osm->n_local_true,osm->is,osm->is,osm->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);

  /*
     Loop over subdomains putting them into local ksp
  */
  for (i=0; i<osm->n_local_true; i++) {
    ierr = KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i]);CHKERRQ(ierr);
    if (!pc->setupcalled) {
      ierr = KSPSetFromOptions(osm->ksp[i]);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}

#include <petsc/private/kspimpl.h>
#undef __FUNCT__
#define __FUNCT__ "PCSetUpOnBlocks_ASM"
static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  for (i=0; i<osm->n_local_true; i++) {
    ierr = KSPSetUp(osm->ksp[i]);CHKERRQ(ierr);
    if (osm->ksp[i]->reason == KSP_DIVERGED_PCSETUP_FAILED) {
      PC subpc=osm->ksp[i]->pc;
      pc->failedreason = (PCFailedReason)subpc->failedreason;
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApply_ASM"
static PetscErrorCode PCApply_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local = osm->n_local,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).
  */
  if (!(osm->type & PC_ASM_RESTRICT)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local_true; i++) {
      ierr = VecZeroEntries(osm->x[i]);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_INTERPOLATE)) reverse = SCATTER_REVERSE_LOCAL;

  switch (osm->loctype)
  {
  case PC_COMPOSITE_ADDITIVE:
    for (i=0; i<n_local; i++) {
      ierr = VecScatterBegin(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    }
    ierr = VecZeroEntries(y);CHKERRQ(ierr);
    /* do the local solves */
    for (i=0; i<n_local_true; i++) {
      ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
      ierr = KSPSolve(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
      if (osm->localization) {
        ierr = VecScatterBegin(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
      }
      ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
    }
    /* handle the rest of the scatters that do not have local solves */
    for (i=n_local_true; i<n_local; i++) {
      ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
      ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
    }
    for (i=0; i<n_local; i++) {
      ierr = VecScatterEnd(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
    }
    break;
  case PC_COMPOSITE_MULTIPLICATIVE:
    ierr = VecZeroEntries(y);CHKERRQ(ierr);
    /* do the local solves */
    for (i = 0; i < n_local_true; ++i) {
      if (i > 0) {
        /* Update rhs */
        ierr = VecScatterBegin(osm->restriction[i], osm->lx, osm->x[i], INSERT_VALUES, forward);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->restriction[i], osm->lx, osm->x[i], INSERT_VALUES, forward);CHKERRQ(ierr);
      } else {
        ierr = VecZeroEntries(osm->x[i]);CHKERRQ(ierr);
      }
      ierr = VecScatterBegin(osm->restriction[i], x, osm->x[i], ADD_VALUES, forward);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->restriction[i], x, osm->x[i], ADD_VALUES, forward);CHKERRQ(ierr);
      ierr = KSPSolve(osm->ksp[i], osm->x[i], osm->y[i]);CHKERRQ(ierr);
      if (osm->localization) {
        ierr = VecScatterBegin(osm->localization[i], osm->y[i], osm->y_local[i], INSERT_VALUES, forward);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->localization[i], osm->y[i], osm->y_local[i], INSERT_VALUES, forward);CHKERRQ(ierr);
      }
      ierr = VecScatterBegin(osm->prolongation[i], osm->y_local[i], y, ADD_VALUES, reverse);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->prolongation[i], osm->y_local[i], y, ADD_VALUES, reverse);CHKERRQ(ierr);
      if (i < n_local_true-1) {
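        /* Propagate the correction just computed to the remaining blocks: gather it into ly,
           negate, and apply the local operator so that lx = -A_loc * (current correction);
           the next block adds lx to its restricted right-hand side (a Gauss-Seidel-style sweep). */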
        ierr = VecSet(osm->ly, 0.0);CHKERRQ(ierr);
        ierr = VecScatterBegin(osm->prolongation[i], osm->y_local[i], osm->ly, INSERT_VALUES, reverse);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->prolongation[i], osm->y_local[i], osm->ly, INSERT_VALUES, reverse);CHKERRQ(ierr);
        ierr = VecScale(osm->ly, -1.0);CHKERRQ(ierr);
        ierr = MatMult(osm->lmat[0], osm->ly, osm->lx);CHKERRQ(ierr);
      }
    }
    /* handle the rest of the scatters that do not have local solves */
    for (i = n_local_true; i < n_local; ++i) {
      ierr = VecScatterBegin(osm->restriction[i], x, osm->x[i], INSERT_VALUES, forward);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->restriction[i], x, osm->x[i], INSERT_VALUES, forward);CHKERRQ(ierr);
      ierr = VecScatterBegin(osm->prolongation[i], osm->y_local[i], y, ADD_VALUES, reverse);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->prolongation[i], osm->y_local[i], y, ADD_VALUES, reverse);CHKERRQ(ierr);
    }
    break;
  default: SETERRQ1(PetscObjectComm((PetscObject) pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApplyTranspose_ASM"
static PetscErrorCode PCApplyTranspose_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local = osm->n_local,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).

     Note: these are reversed from the PCApply_ASM() because we are applying the
     transpose of the three terms
  */
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local_true; i++) {
      ierr = VecZeroEntries(osm->x[i]);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_RESTRICT)) reverse = SCATTER_REVERSE_LOCAL;

  for (i=0; i<n_local; i++) {
    ierr = VecScatterBegin(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
  }
  ierr = VecZeroEntries(y);CHKERRQ(ierr);
  /* do the local solves */
  for (i=0; i<n_local_true; i++) {
    ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = KSPSolveTranspose(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
    if (osm->localization) {
      ierr = VecScatterBegin(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    }
    ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  /* handle the rest of the scatters that do not have local solves */
  for (i=n_local_true; i<n_local; i++) {
    ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  for (i=0; i<n_local; i++) {
    ierr = VecScatterEnd(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCReset_ASM"
static PetscErrorCode PCReset_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  if (osm->ksp) {
    for (i=0; i<osm->n_local_true; i++) {
      ierr = KSPReset(osm->ksp[i]);CHKERRQ(ierr);
    }
  }
  if (osm->pmat) {
    if (osm->n_local_true > 0) {
      ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
    }
  }
  if (osm->restriction) {
    for (i=0; i<osm->n_local; i++) {
      ierr = VecScatterDestroy(&osm->restriction[i]);CHKERRQ(ierr);
      if (osm->localization) {ierr = VecScatterDestroy(&osm->localization[i]);CHKERRQ(ierr);}
      ierr = VecScatterDestroy(&osm->prolongation[i]);CHKERRQ(ierr);
      ierr = VecDestroy(&osm->x[i]);CHKERRQ(ierr);
      ierr = VecDestroy(&osm->y[i]);CHKERRQ(ierr);
      ierr = VecDestroy(&osm->y_local[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree(osm->restriction);CHKERRQ(ierr);
    if (osm->localization) {ierr = PetscFree(osm->localization);CHKERRQ(ierr);}
    ierr = PetscFree(osm->prolongation);CHKERRQ(ierr);
    ierr = PetscFree(osm->x);CHKERRQ(ierr);
    ierr = PetscFree(osm->y);CHKERRQ(ierr);
    ierr = PetscFree(osm->y_local);CHKERRQ(ierr);
  }
  ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
    ierr = ISDestroy(&osm->lis);CHKERRQ(ierr);
    ierr = MatDestroyMatrices(1, &osm->lmat);CHKERRQ(ierr);
    ierr = VecDestroy(&osm->lx);CHKERRQ(ierr);
    ierr = VecDestroy(&osm->ly);CHKERRQ(ierr);
  }

  osm->is       = 0;
  osm->is_local = 0;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCDestroy_ASM"
static PetscErrorCode PCDestroy_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  ierr = PCReset_ASM(pc);CHKERRQ(ierr);
  if (osm->ksp) {
    for (i=0; i<osm->n_local_true; i++) {
      ierr = KSPDestroy(&osm->ksp[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree(osm->ksp);CHKERRQ(ierr);
  }
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetFromOptions_ASM"
static PetscErrorCode PCSetFromOptions_ASM(PetscOptionItems *PetscOptionsObject,PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       blocks,ovl;
  PetscBool      symset,flg;
  PCASMType      asmtype;
  PCCompositeType loctype;

  PetscFunctionBegin;
  /* default to the basic (symmetric) ASM variant if the matrix is known to be symmetric */
  if (!osm->type_set && pc->pmat) {
    ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
    if (symset && flg) osm->type = PC_ASM_BASIC;
  }
  ierr = PetscOptionsHead(PetscOptionsObject,"Additive Schwarz options");CHKERRQ(ierr);
  ierr = PetscOptionsBool("-pc_asm_dm_subdomains","Use DMCreateDomainDecomposition() to define subdomains","PCASMSetDMSubdomains",osm->dm_subdomains,&osm->dm_subdomains,&flg);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-pc_asm_blocks","Number of subdomains","PCASMSetTotalSubdomains",osm->n,&blocks,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PCASMSetTotalSubdomains(pc,blocks,NULL,NULL);CHKERRQ(ierr);
    osm->dm_subdomains = PETSC_FALSE;
  }
  ierr = PetscOptionsInt("-pc_asm_overlap","Number of grid points overlap","PCASMSetOverlap",osm->overlap,&ovl,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PCASMSetOverlap(pc,ovl);CHKERRQ(ierr);
    osm->dm_subdomains = PETSC_FALSE;
  }
  flg  = PETSC_FALSE;
  ierr = PetscOptionsEnum("-pc_asm_type","Type of restriction/extension","PCASMSetType",PCASMTypes,(PetscEnum)osm->type,(PetscEnum*)&asmtype,&flg);CHKERRQ(ierr);
  if (flg) {ierr = PCASMSetType(pc,asmtype);CHKERRQ(ierr); }
  flg  = PETSC_FALSE;
  ierr = PetscOptionsEnum("-pc_asm_local_type","Type of local solver composition","PCASMSetLocalType",PCCompositeTypes,(PetscEnum)osm->loctype,(PetscEnum*)&loctype,&flg);CHKERRQ(ierr);
  if (flg) {ierr = PCASMSetLocalType(pc,loctype);CHKERRQ(ierr); }
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*------------------------------------------------------------------------------------*/

#undef __FUNCT__
#define __FUNCT__ "PCASMSetLocalSubdomains_ASM"
static PetscErrorCode  PCASMSetLocalSubdomains_ASM(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 1 or more blocks, n = %D",n);
  if (pc->setupcalled && (n != osm->n_local_true || is)) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetLocalSubdomains() should be called before calling PCSetUp().");

  if (!pc->setupcalled) {
    if (is) {
      for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is[i]);CHKERRQ(ierr);}
    }
    if (is_local) {
      for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is_local[i]);CHKERRQ(ierr);}
    }
    ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);

    osm->n_local_true = n;
    osm->is           = 0;
    osm->is_local     = 0;
    if (is) {
      ierr = PetscMalloc1(n,&osm->is);CHKERRQ(ierr);
      for (i=0; i<n; i++) osm->is[i] = is[i];
      /* Flag indicating that the user has set overlapping subdomains so PCASM should not increase their size. */
      osm->overlap = -1;
    }
    if (is_local) {
      ierr = PetscMalloc1(n,&osm->is_local);CHKERRQ(ierr);
      for (i=0; i<n; i++) osm->is_local[i] = is_local[i];
      if (!is) {
        ierr = PetscMalloc1(osm->n_local_true,&osm->is);CHKERRQ(ierr);
        for (i=0; i<osm->n_local_true; i++) {
          if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
            ierr = ISDuplicate(osm->is_local[i],&osm->is[i]);CHKERRQ(ierr);
            ierr = ISCopy(osm->is_local[i],osm->is[i]);CHKERRQ(ierr);
          } else {
            ierr = PetscObjectReference((PetscObject)osm->is_local[i]);CHKERRQ(ierr);
            osm->is[i] = osm->is_local[i];
          }
        }
      }
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMSetTotalSubdomains_ASM"
static PetscErrorCode  PCASMSetTotalSubdomains_ASM(PC pc,PetscInt N,IS *is,IS *is_local)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscMPIInt    rank,size;
  PetscInt       n;

  PetscFunctionBegin;
  if (N < 1) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Number of total blocks must be > 0, N = %D",N);
  if (is || is_local) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Use PCASMSetLocalSubdomains() to set specific index sets\n\tthey cannot be set globally yet.");

  /*
     Split the subdomains equally among all processors
  */
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);
  n    = N/size + ((N % size) > rank);
  if (!n) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Process %d must have at least one block: total processors %d total blocks %D",(int)rank,(int)size,N);
  if (pc->setupcalled && n != osm->n_local_true) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"PCASMSetTotalSubdomains() should be called before PCSetUp().");
  if (!pc->setupcalled) {
    ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);

    osm->n_local_true = n;
    osm->is           = 0;
    osm->is_local     = 0;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMSetOverlap_ASM"
static PetscErrorCode  PCASMSetOverlap_ASM(PC pc,PetscInt ovl)
{
  PC_ASM *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  if (ovl < 0) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
  if (pc->setupcalled && ovl != osm->overlap) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetOverlap() should be called before PCSetUp().");
  if (!pc->setupcalled) osm->overlap = ovl;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMSetType_ASM"
static PetscErrorCode  PCASMSetType_ASM(PC pc,PCASMType type)
{
  PC_ASM *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  osm->type     = type;
  osm->type_set = PETSC_TRUE;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMGetType_ASM"
static PetscErrorCode  PCASMGetType_ASM(PC pc,PCASMType *type)
{
  PC_ASM *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  *type = osm->type;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMSetLocalType_ASM"
static PetscErrorCode  PCASMSetLocalType_ASM(PC pc, PCCompositeType type)
{
  PC_ASM *osm = (PC_ASM *) pc->data;

  PetscFunctionBegin;
  osm->loctype = type;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMGetLocalType_ASM"
static PetscErrorCode  PCASMGetLocalType_ASM(PC pc, PCCompositeType *type)
{
  PC_ASM *osm = (PC_ASM *) pc->data;

  PetscFunctionBegin;
  *type = osm->loctype;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMSetSortIndices_ASM"
static PetscErrorCode  PCASMSetSortIndices_ASM(PC pc,PetscBool  doSort)
{
  PC_ASM *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  osm->sort_indices = doSort;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMGetSubKSP_ASM"
static PetscErrorCode  PCASMGetSubKSP_ASM(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (osm->n_local_true < 1) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ORDER,"Need to call PCSetUp() on PC (or KSPSetUp() on the outer KSP object) before calling here");

  if (n_local) *n_local = osm->n_local_true;
  if (first_local) {
    ierr          = MPI_Scan(&osm->n_local_true,first_local,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
    *first_local -= osm->n_local_true;
  }
  if (ksp) {
    /* Assume that local solves are now different; not necessarily
       true though!  This flag is used only for PCView_ASM() */
    *ksp                   = osm->ksp;
    osm->same_local_solves = PETSC_FALSE;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMSetLocalSubdomains"
/*@C
    PCASMSetLocalSubdomains - Sets the local subdomains (for this processor only) for the additive Schwarz preconditioner.

    Collective on PC

    Input Parameters:
+   pc - the preconditioner context
.   n - the number of subdomains for this processor (default value = 1)
.   is - the index sets that define the subdomains for this processor
         (or NULL for PETSc to determine subdomains)
-   is_local - the index sets that define the local part of the subdomains for this processor
         (or NULL)

    Notes:
    The IS numbering is in the parallel, global numbering of the vector for both is and is_local

    By default the ASM preconditioner uses 1 block per processor.

    Use PCASMSetTotalSubdomains() to set the subdomains for all processors.
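
    A minimal usage sketch providing two user-defined overlapping subdomains on each process
    (the index sets is0 and is1 are assumptions, e.g. created with ISCreateGeneral()):
.vb
    IS subs[2] = {is0,is1};
    ierr = PCSetType(pc,PCASM);CHKERRQ(ierr);
    ierr = PCASMSetLocalSubdomains(pc,2,subs,NULL);CHKERRQ(ierr);
.ve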

    Level: advanced

.keywords: PC, ASM, set, local, subdomains, additive Schwarz

.seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
@*/
PetscErrorCode  PCASMSetLocalSubdomains(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCASMSetLocalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,n,is,is_local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMSetTotalSubdomains"
/*@C
    PCASMSetTotalSubdomains - Sets the subdomains for all processors for the
    additive Schwarz preconditioner.  Either all or no processors in the
    PC communicator must call this routine, with the same index sets.

    Collective on PC

    Input Parameters:
+   pc - the preconditioner context
.   N  - the number of subdomains for all processors
.   is - the index sets that define the subdomains for all processors
         (or NULL to ask PETSc to come up with subdomains)
-   is_local - the index sets that define the local part of the subdomains for this processor
         (or NULL)

    Options Database Key:
    To set the total number of subdomain blocks rather than specify the
    index sets, use the option
.    -pc_asm_blocks <blks> - Sets total blocks

    Notes:
    Currently you cannot use this to set the actual subdomains with the argument is.

    By default the ASM preconditioner uses 1 block per processor.

    These index sets cannot be destroyed until after completion of the
    linear solves for which the ASM preconditioner is being used.

    Use PCASMSetLocalSubdomains() to set local subdomains.

    The IS numbering is in the parallel, global numbering of the vector for both is and is_local
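
    A minimal usage sketch, letting PETSc split 8 blocks across the processes of the PC communicator:
.vb
    ierr = PCSetType(pc,PCASM);CHKERRQ(ierr);
    ierr = PCASMSetTotalSubdomains(pc,8,NULL,NULL);CHKERRQ(ierr);
.ve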

    Level: advanced

.keywords: PC, ASM, set, total, global, subdomains, additive Schwarz

.seealso: PCASMSetLocalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D()
@*/
PetscErrorCode  PCASMSetTotalSubdomains(PC pc,PetscInt N,IS is[],IS is_local[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCASMSetTotalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,N,is,is_local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMSetOverlap"
/*@
    PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
    additive Schwarz preconditioner.  Either all or no processors in the
    PC communicator must call this routine. If MatIncreaseOverlap() is used,
    use the option -mat_increase_overlap when the problem size is large.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)

    Options Database Key:
.   -pc_asm_overlap <ovl> - Sets overlap

    Notes:
    By default the ASM preconditioner uses 1 block per processor.  To use
    multiple blocks per processor, see PCASMSetTotalSubdomains() and
    PCASMSetLocalSubdomains() (and the option -pc_asm_blocks <blks>).

    The overlap defaults to 1, so if one desires that no additional
    overlap be computed beyond what may have been set with a call to
    PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(), then ovl
    must be set to 0.  In particular, if one does not explicitly set
    the subdomains in an application code, then all overlap would be computed
    internally by PETSc, and using an overlap of 0 would result in an ASM
    variant that is equivalent to the block Jacobi preconditioner.

    Note that one can define initial index sets with any overlap via
    PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(); the routine
    PCASMSetOverlap() merely allows PETSc to extend that overlap further
    if desired.
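
    A minimal usage sketch requesting two extra layers of overlap around each subdomain:
.vb
    ierr = PCSetType(pc,PCASM);CHKERRQ(ierr);
    ierr = PCASMSetOverlap(pc,2);CHKERRQ(ierr);
.ve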

    Level: intermediate

.keywords: PC, ASM, set, overlap

.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
@*/
PetscErrorCode  PCASMSetOverlap(PC pc,PetscInt ovl)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveInt(pc,ovl,2);
  ierr = PetscTryMethod(pc,"PCASMSetOverlap_C",(PC,PetscInt),(pc,ovl));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMSetType"
/*@
    PCASMSetType - Sets the type of restriction and interpolation used
    for local problems in the additive Schwarz method.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   type - variant of ASM, one of
.vb
      PC_ASM_BASIC       - full interpolation and restriction
      PC_ASM_RESTRICT    - full restriction, local processor interpolation
      PC_ASM_INTERPOLATE - full interpolation, local processor restriction
      PC_ASM_NONE        - local processor restriction and interpolation
.ve

    Options Database Key:
.   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
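
    A minimal usage sketch selecting the standard (non-restricted) variant programmatically:
.vb
    ierr = PCASMSetType(pc,PC_ASM_BASIC);CHKERRQ(ierr);
.ve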

    Level: intermediate

.keywords: PC, ASM, set, type

.seealso: PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D()
@*/
PetscErrorCode  PCASMSetType(PC pc,PCASMType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveEnum(pc,type,2);
  ierr = PetscTryMethod(pc,"PCASMSetType_C",(PC,PCASMType),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMGetType"
/*@
    PCASMGetType - Gets the type of restriction and interpolation used
    for local problems in the additive Schwarz method.

    Logically Collective on PC

    Input Parameter:
.   pc  - the preconditioner context

    Output Parameter:
.   type - variant of ASM, one of

.vb
      PC_ASM_BASIC       - full interpolation and restriction
      PC_ASM_RESTRICT    - full restriction, local processor interpolation
      PC_ASM_INTERPOLATE - full interpolation, local processor restriction
      PC_ASM_NONE        - local processor restriction and interpolation
.ve

    Options Database Key:
.   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type

    Level: intermediate

.keywords: PC, ASM, set, type

.seealso: PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D()
@*/
PetscErrorCode  PCASMGetType(PC pc,PCASMType *type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscUseMethod(pc,"PCASMGetType_C",(PC,PCASMType*),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMSetLocalType"
/*@
  PCASMSetLocalType - Sets the type of composition used for local problems in the additive Schwarz method.

  Logically Collective on PC

  Input Parameters:
+ pc  - the preconditioner context
- type - type of composition, one of
.vb
  PC_COMPOSITE_ADDITIVE       - local additive combination
  PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
.ve

  Options Database Key:
. -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type

  Level: intermediate

.seealso: PCASMSetType(), PCASMGetType(), PCASMGetLocalType(), PCASMCreate()
@*/
PetscErrorCode PCASMSetLocalType(PC pc, PCCompositeType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(pc, type, 2);
  ierr = PetscTryMethod(pc, "PCASMSetLocalType_C", (PC, PCCompositeType), (pc, type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMGetLocalType"
/*@
  PCASMGetLocalType - Gets the type of composition used for local problems in the additive Schwarz method.

  Logically Collective on PC

  Input Parameter:
. pc  - the preconditioner context

  Output Parameter:
. type - type of composition, one of
.vb
  PC_COMPOSITE_ADDITIVE       - local additive combination
  PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
.ve

  Options Database Key:
. -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type

  Level: intermediate

.seealso: PCASMSetType(), PCASMGetType(), PCASMSetLocalType(), PCASMCreate()
@*/
PetscErrorCode PCASMGetLocalType(PC pc, PCCompositeType *type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidPointer(type, 2);
  ierr = PetscUseMethod(pc, "PCASMGetLocalType_C", (PC, PCCompositeType *), (pc, type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMSetSortIndices"
/*@
    PCASMSetSortIndices - Determines whether subdomain indices are sorted.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   doSort - sort the subdomain indices

    Level: intermediate

.keywords: PC, ASM, set, type

.seealso: PCASMSetLocalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D()
@*/
PetscErrorCode  PCASMSetSortIndices(PC pc,PetscBool doSort)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveBool(pc,doSort,2);
  ierr = PetscTryMethod(pc,"PCASMSetSortIndices_C",(PC,PetscBool),(pc,doSort));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMGetSubKSP"
/*@C
   PCASMGetSubKSP - Gets the local KSP contexts for all blocks on
   this processor.

   Collective on PC iff first_local is requested

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  n_local - the number of blocks on this processor or NULL
.  first_local - the global number of the first block on this processor or NULL,
                 all processors must request or all must pass NULL
-  ksp - the array of KSP contexts

   Note:
   After PCASMGetSubKSP() the array of KSPs is not to be freed.

   Currently for some matrix implementations only 1 block per processor
   is supported.

   You must call KSPSetUp() before calling PCASMGetSubKSP().
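
   A minimal usage sketch customizing every block solver after setup (ksp is assumed to be
   the outer KSP that uses this PCASM preconditioner):
.vb
   KSP      *subksp;
   PC       subpc;
   PetscInt i,nlocal;
   ierr = KSPSetUp(ksp);CHKERRQ(ierr);
   ierr = PCASMGetSubKSP(pc,&nlocal,NULL,&subksp);CHKERRQ(ierr);
   for (i=0; i<nlocal; i++) {
     ierr = KSPGetPC(subksp[i],&subpc);CHKERRQ(ierr);
     ierr = PCSetType(subpc,PCILU);CHKERRQ(ierr);
   }
.ve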

   Fortran note:
   The output argument 'ksp' must be an array of sufficient length or NULL_OBJECT. The latter can be used to learn the necessary length.

   Level: advanced

.keywords: PC, ASM, additive Schwarz, get, sub, KSP, context

.seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(),
          PCASMCreateSubdomains2D()
@*/
PetscErrorCode  PCASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscUseMethod(pc,"PCASMGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------------------*/
/*MC
   PCASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
           its own KSP object.

   Options Database Keys:
+  -pc_asm_blocks <blks> - Sets total blocks
.  -pc_asm_overlap <ovl> - Sets overlap
-  -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type

     IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors, you
      will get a different convergence rate due to the default option of -pc_asm_type restrict. Use
      -pc_asm_type basic to use the standard ASM.

   Notes: Each processor can have one or more blocks, but a block cannot be shared by more
     than one processor. Defaults to one block per processor.

     To set options on the solvers for each block, append -sub_ to all the KSP and PC
        options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly

     To set the options on the solvers separately for each block, call PCASMGetSubKSP()
         and set the options directly on the resulting KSP object (you can access its PC
         with KSPGetPC())
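
     For example, one plausible command line combining these options (a sketch; the executable
     name ./app is an assumption):
.vb
     mpiexec -n 4 ./app -ksp_type gmres -pc_type asm -pc_asm_overlap 2 -sub_ksp_type preonly -sub_pc_type lu
.ve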


   Level: beginner

   Concepts: additive Schwarz method

    References:
    M. Dryja and O. B. Widlund, An additive variant of the Schwarz alternating method for the case of many subregions,
    Technical Report, Courant Institute, New York University.

    Barry Smith, Petter Bjorstad, and William Gropp, Domain Decomposition: Parallel Multilevel Methods for Elliptic
    Partial Differential Equations, Cambridge University Press, ISBN 0-521-49589-X.
1255 
1256 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
1257            PCBJACOBI, PCASMGetSubKSP(), PCASMSetLocalSubdomains(),
1258            PCASMSetTotalSubdomains(), PCSetModifySubmatrices(), PCASMSetOverlap(), PCASMSetType()
1259 
1260 M*/

#undef __FUNCT__
#define __FUNCT__ "PCCreate_ASM"
PETSC_EXTERN PetscErrorCode PCCreate_ASM(PC pc)
{
  PetscErrorCode ierr;
  PC_ASM         *osm;

  PetscFunctionBegin;
  ierr = PetscNewLog(pc,&osm);CHKERRQ(ierr);

  osm->n                 = PETSC_DECIDE;
  osm->n_local           = 0;
  osm->n_local_true      = PETSC_DECIDE;
  osm->overlap           = 1;
  osm->ksp               = 0;
  osm->restriction       = 0;
  osm->localization      = 0;
  osm->prolongation      = 0;
  osm->x                 = 0;
  osm->y                 = 0;
  osm->y_local           = 0;
  osm->is                = 0;
  osm->is_local          = 0;
  osm->mat               = 0;
  osm->pmat              = 0;
  osm->type              = PC_ASM_RESTRICT;
  osm->loctype           = PC_COMPOSITE_ADDITIVE;
  osm->same_local_solves = PETSC_TRUE;
  osm->sort_indices      = PETSC_TRUE;
  osm->dm_subdomains     = PETSC_FALSE;

  pc->data                 = (void*)osm;
  pc->ops->apply           = PCApply_ASM;
  pc->ops->applytranspose  = PCApplyTranspose_ASM;
  pc->ops->setup           = PCSetUp_ASM;
  pc->ops->reset           = PCReset_ASM;
  pc->ops->destroy         = PCDestroy_ASM;
  pc->ops->setfromoptions  = PCSetFromOptions_ASM;
  pc->ops->setuponblocks   = PCSetUpOnBlocks_ASM;
  pc->ops->view            = PCView_ASM;
  pc->ops->applyrichardson = 0;

  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalSubdomains_C",PCASMSetLocalSubdomains_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetTotalSubdomains_C",PCASMSetTotalSubdomains_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetOverlap_C",PCASMSetOverlap_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetType_C",PCASMSetType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetType_C",PCASMGetType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalType_C",PCASMSetLocalType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetLocalType_C",PCASMGetLocalType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSortIndices_C",PCASMSetSortIndices_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubKSP_C",PCASMGetSubKSP_ASM);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMCreateSubdomains"
/*@C
   PCASMCreateSubdomains - Creates the index sets for the overlapping Schwarz
   preconditioner for any problem on a general grid.

   Collective

   Input Parameters:
+  A - The global matrix operator
-  n - the number of local blocks

   Output Parameter:
.  outis - the array of index sets defining the subdomains

   Level: advanced

   Note: this generates nonoverlapping subdomains; PCASM will generate the overlap
    from these if you use PCASMSetLocalSubdomains()

    In the Fortran version you must provide the array outis[] already allocated of length n.
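
    Example usage (a sketch that omits error checking; the matrix A, the
    preconditioner pc, and the block count n are assumed to exist already):
.vb
      IS *is;
      PCASMCreateSubdomains(A,n,&is);
      PCASMSetLocalSubdomains(pc,n,is,NULL);
      PCASMDestroySubdomains(n,is,NULL);
.ve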

.keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid

.seealso: PCASMSetLocalSubdomains(), PCASMDestroySubdomains()
@*/
PetscErrorCode  PCASMCreateSubdomains(Mat A, PetscInt n, IS* outis[])
{
  MatPartitioning mpart;
  const char      *prefix;
  PetscErrorCode  (*f)(Mat,Mat*);
  PetscMPIInt     size;
  PetscInt        i,j,rstart,rend,bs;
  PetscBool       isbaij = PETSC_FALSE,foundpart = PETSC_FALSE;
  Mat             Ad     = NULL, adj;
  IS              ispart,isnumb,*is;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidPointer(outis,3);
  if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of local blocks must be > 0, n = %D",n);

  /* Get prefix, row distribution, and block size */
  ierr = MatGetOptionsPrefix(A,&prefix);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
  ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
  if (rstart/bs*bs != rstart || rend/bs*bs != rend) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"bad row distribution [%D,%D) for matrix block size %D",rstart,rend,bs);

  /* Get diagonal block from matrix if possible */
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);
  ierr = PetscObjectQueryFunction((PetscObject)A,"MatGetDiagonalBlock_C",&f);CHKERRQ(ierr);
  if (f) {
    ierr = MatGetDiagonalBlock(A,&Ad);CHKERRQ(ierr);
  } else if (size == 1) {
    Ad = A;
  }
  if (Ad) {
    ierr = PetscObjectTypeCompare((PetscObject)Ad,MATSEQBAIJ,&isbaij);CHKERRQ(ierr);
    if (!isbaij) {ierr = PetscObjectTypeCompare((PetscObject)Ad,MATSEQSBAIJ,&isbaij);CHKERRQ(ierr);}
  }
  if (Ad && n > 1) {
    PetscBool match,done;
    /* Try to setup a good matrix partitioning if available */
    ierr = MatPartitioningCreate(PETSC_COMM_SELF,&mpart);CHKERRQ(ierr);
    ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
    ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr);
    ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGCURRENT,&match);CHKERRQ(ierr);
    if (!match) {
      ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGSQUARE,&match);CHKERRQ(ierr);
    }
    if (!match) { /* assume a "good" partitioner is available */
      PetscInt       na;
      const PetscInt *ia,*ja;
      ierr = MatGetRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
      if (done) {
        /* Build adjacency matrix by hand. Unfortunately a call to
           MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
           remove the block-aij structure and we cannot expect
           MatPartitioning to split vertices as we need */
        PetscInt       i,j,len,nnz,cnt,*iia=0,*jja=0;
        const PetscInt *row;
        nnz = 0;
        for (i=0; i<na; i++) { /* count number of nonzeros */
          len = ia[i+1] - ia[i];
          row = ja + ia[i];
          for (j=0; j<len; j++) {
            if (row[j] == i) { /* don't count diagonal */
              len--; break;
            }
          }
          nnz += len;
        }
        ierr   = PetscMalloc1(na+1,&iia);CHKERRQ(ierr);
        ierr   = PetscMalloc1(nnz,&jja);CHKERRQ(ierr);
        nnz    = 0;
        iia[0] = 0;
        for (i=0; i<na; i++) { /* fill adjacency */
          cnt = 0;
          len = ia[i+1] - ia[i];
          row = ja + ia[i];
          for (j=0; j<len; j++) {
            if (row[j] != i) { /* if not diagonal */
              jja[nnz+cnt++] = row[j];
            }
          }
          nnz     += cnt;
          iia[i+1] = nnz;
        }
        /* Partitioning of the adjacency matrix */
        ierr      = MatCreateMPIAdj(PETSC_COMM_SELF,na,na,iia,jja,NULL,&adj);CHKERRQ(ierr);
        ierr      = MatPartitioningSetAdjacency(mpart,adj);CHKERRQ(ierr);
        ierr      = MatPartitioningSetNParts(mpart,n);CHKERRQ(ierr);
        ierr      = MatPartitioningApply(mpart,&ispart);CHKERRQ(ierr);
        ierr      = ISPartitioningToNumbering(ispart,&isnumb);CHKERRQ(ierr);
        ierr      = MatDestroy(&adj);CHKERRQ(ierr);
        foundpart = PETSC_TRUE;
      }
      ierr = MatRestoreRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
    }
    ierr = MatPartitioningDestroy(&mpart);CHKERRQ(ierr);
  }

  ierr   = PetscMalloc1(n,&is);CHKERRQ(ierr);
  *outis = is;

  if (!foundpart) {

    /* Partitioning by contiguous chunks of rows */

    PetscInt mbs   = (rend-rstart)/bs;
    PetscInt start = rstart;
    for (i=0; i<n; i++) {
      PetscInt count = (mbs/n + ((mbs % n) > i)) * bs;
      ierr   = ISCreateStride(PETSC_COMM_SELF,count,start,1,&is[i]);CHKERRQ(ierr);
      start += count;
    }

  } else {

    /* Partitioning by adjacency of diagonal block */

    const PetscInt *numbering;
    PetscInt       *count,nidx,*indices,*newidx,start=0;
    /* Get node count in each partition */
    ierr = PetscMalloc1(n,&count);CHKERRQ(ierr);
    ierr = ISPartitioningCount(ispart,n,count);CHKERRQ(ierr);
    if (isbaij && bs > 1) { /* adjust for the block-aij case */
      for (i=0; i<n; i++) count[i] *= bs;
    }
    /* Build indices from node numbering */
    ierr = ISGetLocalSize(isnumb,&nidx);CHKERRQ(ierr);
    ierr = PetscMalloc1(nidx,&indices);CHKERRQ(ierr);
    for (i=0; i<nidx; i++) indices[i] = i; /* needs to be initialized */
    ierr = ISGetIndices(isnumb,&numbering);CHKERRQ(ierr);
    ierr = PetscSortIntWithPermutation(nidx,numbering,indices);CHKERRQ(ierr);
    ierr = ISRestoreIndices(isnumb,&numbering);CHKERRQ(ierr);
    if (isbaij && bs > 1) { /* adjust for the block-aij case */
      ierr = PetscMalloc1(nidx*bs,&newidx);CHKERRQ(ierr);
      for (i=0; i<nidx; i++) {
        for (j=0; j<bs; j++) newidx[i*bs+j] = indices[i]*bs + j;
      }
      ierr    = PetscFree(indices);CHKERRQ(ierr);
      nidx   *= bs;
      indices = newidx;
    }
    /* Shift to get global indices */
    for (i=0; i<nidx; i++) indices[i] += rstart;

    /* Build the index sets for each block */
    for (i=0; i<n; i++) {
      ierr   = ISCreateGeneral(PETSC_COMM_SELF,count[i],&indices[start],PETSC_COPY_VALUES,&is[i]);CHKERRQ(ierr);
      ierr   = ISSort(is[i]);CHKERRQ(ierr);
      start += count[i];
    }

    ierr = PetscFree(count);CHKERRQ(ierr);
    ierr = PetscFree(indices);CHKERRQ(ierr);
    ierr = ISDestroy(&isnumb);CHKERRQ(ierr);
    ierr = ISDestroy(&ispart);CHKERRQ(ierr);

  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMDestroySubdomains"
/*@C
   PCASMDestroySubdomains - Destroys the index sets created with
   PCASMCreateSubdomains(). Should be called after setting subdomains
   with PCASMSetLocalSubdomains().

   Collective

   Input Parameters:
+  n - the number of index sets
.  is - the array of index sets
-  is_local - the array of local index sets, can be NULL

   Level: advanced

.keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid

.seealso: PCASMCreateSubdomains(), PCASMSetLocalSubdomains()
@*/
PetscErrorCode  PCASMDestroySubdomains(PetscInt n, IS is[], IS is_local[])
{
  PetscInt       i;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (n <= 0) PetscFunctionReturn(0);
  if (is) {
    PetscValidPointer(is,2);
    for (i=0; i<n; i++) { ierr = ISDestroy(&is[i]);CHKERRQ(ierr); }
    ierr = PetscFree(is);CHKERRQ(ierr);
  }
  if (is_local) {
    PetscValidPointer(is_local,3);
    for (i=0; i<n; i++) { ierr = ISDestroy(&is_local[i]);CHKERRQ(ierr); }
    ierr = PetscFree(is_local);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMCreateSubdomains2D"
/*@
   PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
   preconditioner for a two-dimensional problem on a regular grid.

   Not Collective

   Input Parameters:
+  m, n - the number of mesh points in the x and y directions
.  M, N - the number of subdomains in the x and y directions
.  dof - degrees of freedom per node
-  overlap - overlap in mesh lines

   Output Parameters:
+  Nsub - the number of subdomains created
.  is - array of index sets defining overlapping (if overlap > 0) subdomains
-  is_local - array of index sets defining non-overlapping subdomains

   Note:
   Presently PCASMCreateSubdomains2D() is valid only for sequential
   preconditioners.  More general related routines are
   PCASMSetTotalSubdomains() and PCASMSetLocalSubdomains().
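
   Example usage (a sketch that omits error checking): create a 4 x 4 array of
   overlapping subdomains for a 64 x 64 grid with one mesh line of overlap,
   then hand them to a PCASM preconditioner pc (assumed to exist):
.vb
      IS       *is,*is_local;
      PetscInt Nsub;
      PCASMCreateSubdomains2D(64,64,4,4,1,1,&Nsub,&is,&is_local);
      PCASMSetLocalSubdomains(pc,Nsub,is,is_local);
.ve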

   Level: advanced

.keywords: PC, ASM, additive Schwarz, create, subdomains, 2D, regular grid

.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
          PCASMSetOverlap()
@*/
PetscErrorCode  PCASMCreateSubdomains2D(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt dof,PetscInt overlap,PetscInt *Nsub,IS **is,IS **is_local)
{
  PetscInt       i,j,height,width,ystart,xstart,yleft,yright,xleft,xright,loc_outer;
  PetscErrorCode ierr;
  PetscInt       nidx,*idx,loc,ii,jj,count;

  PetscFunctionBegin;
  if (dof != 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"dof must be 1");

  *Nsub     = N*M;
  ierr      = PetscMalloc1(*Nsub,is);CHKERRQ(ierr);
  ierr      = PetscMalloc1(*Nsub,is_local);CHKERRQ(ierr);
  ystart    = 0;
  loc_outer = 0;
  for (i=0; i<N; i++) {
    height = n/N + ((n % N) > i); /* height of subdomain */
    if (height < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many N subdomains for mesh dimension n");
    yleft  = ystart - overlap; if (yleft < 0) yleft = 0;
    yright = ystart + height + overlap; if (yright > n) yright = n;
    xstart = 0;
    for (j=0; j<M; j++) {
      width = m/M + ((m % M) > j); /* width of subdomain */
      if (width < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many M subdomains for mesh dimension m");
      xleft  = xstart - overlap; if (xleft < 0) xleft = 0;
      xright = xstart + width + overlap; if (xright > m) xright = m;
      nidx   = (xright - xleft)*(yright - yleft);
      ierr   = PetscMalloc1(nidx,&idx);CHKERRQ(ierr);
      loc    = 0;
      for (ii=yleft; ii<yright; ii++) {
        count = m*ii + xleft;
        for (jj=xleft; jj<xright; jj++) idx[loc++] = count++;
      }
      ierr = ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,PETSC_COPY_VALUES,(*is)+loc_outer);CHKERRQ(ierr);
      if (overlap == 0) {
        ierr = PetscObjectReference((PetscObject)(*is)[loc_outer]);CHKERRQ(ierr);

        (*is_local)[loc_outer] = (*is)[loc_outer];
      } else {
        for (loc=0,ii=ystart; ii<ystart+height; ii++) {
          for (jj=xstart; jj<xstart+width; jj++) {
            idx[loc++] = m*ii + jj;
          }
        }
        ierr = ISCreateGeneral(PETSC_COMM_SELF,loc,idx,PETSC_COPY_VALUES,*is_local+loc_outer);CHKERRQ(ierr);
      }
      ierr    = PetscFree(idx);CHKERRQ(ierr);
      xstart += width;
      loc_outer++;
    }
    ystart += height;
  }
  for (i=0; i<*Nsub; i++) { ierr = ISSort((*is)[i]);CHKERRQ(ierr); }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMGetLocalSubdomains"
/*@C
    PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
    only) for the additive Schwarz preconditioner.

    Not Collective

    Input Parameter:
.   pc - the preconditioner context

    Output Parameters:
+   n - the number of subdomains for this processor (default value = 1)
.   is - the index sets that define the subdomains for this processor
-   is_local - the index sets that define the local part of the subdomains for this processor (can be NULL)

    Notes:
    The IS numbering is in the parallel, global numbering of the vector.
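
    Example usage (a sketch that omits error checking; the loop index i is an
    illustrative name): view each subdomain index set of a PCASM pc:
.vb
      PCASMGetLocalSubdomains(pc,&n,&is,NULL);
      for (i=0; i<n; i++) { ISView(is[i],PETSC_VIEWER_STDOUT_SELF); }
.ve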

    Level: advanced

.keywords: PC, ASM, set, local, subdomains, additive Schwarz

.seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubmatrices()
@*/
PetscErrorCode  PCASMGetLocalSubdomains(PC pc,PetscInt *n,IS *is[],IS *is_local[])
{
  PC_ASM         *osm;
  PetscErrorCode ierr;
  PetscBool      match;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidIntPointer(n,2);
  if (is) PetscValidPointer(is,3);
  ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
  if (!match) {
    if (n) *n = 0;
    if (is) *is = NULL;
  } else {
    osm = (PC_ASM*)pc->data;
    if (n) *n = osm->n_local_true;
    if (is) *is = osm->is;
    if (is_local) *is_local = osm->is_local;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMGetLocalSubmatrices"
/*@C
    PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
    only) for the additive Schwarz preconditioner.

    Not Collective

    Input Parameter:
.   pc - the preconditioner context

    Output Parameters:
+   n - the number of matrices for this processor (default value = 1)
-   mat - the matrices

    Level: advanced

    Notes: Call after PCSetUp() (or KSPSetUp()) but before PCApply() (or KSPSolve()) and before PCSetUpOnBlocks()

           Usually one would use PCSetModifySubmatrices() to change the submatrices in building the preconditioner.
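
    Example usage (a sketch that omits error checking; the outer solver ksp
    and the loop index i are illustrative names, not part of this interface):
.vb
      KSPSetUp(ksp);
      KSPGetPC(ksp,&pc);
      PCASMGetLocalSubmatrices(pc,&n,&mat);
      for (i=0; i<n; i++) { MatView(mat[i],PETSC_VIEWER_STDOUT_SELF); }
.ve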

.keywords: PC, ASM, set, local, subdomains, additive Schwarz, block Jacobi

.seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains(), PCSetModifySubmatrices()
@*/
PetscErrorCode  PCASMGetLocalSubmatrices(PC pc,PetscInt *n,Mat *mat[])
{
  PC_ASM         *osm;
  PetscErrorCode ierr;
  PetscBool      match;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidIntPointer(n,2);
  if (mat) PetscValidPointer(mat,3);
  if (!pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUp() or PCSetUp().");
  ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
  if (!match) {
    if (n) *n = 0;
    if (mat) *mat = NULL;
  } else {
    osm = (PC_ASM*)pc->data;
    if (n) *n = osm->n_local_true;
    if (mat) *mat = osm->pmat;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMSetDMSubdomains"
/*@
    PCASMSetDMSubdomains - Indicates whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.

    Logically Collective

    Input Parameters:
+   pc  - the preconditioner
-   flg - boolean indicating whether to use subdomains defined by the DM

    Options Database Key:
.   -pc_asm_dm_subdomains <bool>

    Level: intermediate

    Notes:
    PCASMSetTotalSubdomains() and PCASMSetOverlap() take precedence over PCASMSetDMSubdomains(),
    so setting either of the first two effectively turns the latter off.
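
    Example usage (a sketch that omits error checking; dm and ksp are assumed
    to exist and be configured already):
.vb
      KSPSetDM(ksp,dm);
      KSPGetPC(ksp,&pc);
      PCSetType(pc,PCASM);
      PCASMSetDMSubdomains(pc,PETSC_TRUE);
.ve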

.keywords: PC, ASM, DM, set, subdomains, additive Schwarz

.seealso: PCASMGetDMSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap(),
          PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
@*/
PetscErrorCode  PCASMSetDMSubdomains(PC pc,PetscBool flg)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscBool      match;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveBool(pc,flg,2);
  if (pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Not for a setup PC.");
  ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
  if (match) {
    osm->dm_subdomains = flg;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMGetDMSubdomains"
/*@
    PCASMGetDMSubdomains - Returns the flag indicating whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.

    Not Collective

    Input Parameter:
.   pc  - the preconditioner

    Output Parameter:
.   flg - boolean indicating whether to use subdomains defined by the DM

    Level: intermediate

.keywords: PC, ASM, DM, set, subdomains, additive Schwarz

.seealso: PCASMSetDMSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap(),
          PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
@*/
PetscErrorCode  PCASMGetDMSubdomains(PC pc,PetscBool* flg)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscBool      match;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(flg,2);
  ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
  if (match) {
    if (flg) *flg = osm->dm_subdomains;
  }
  PetscFunctionReturn(0);
}
1802