/*
  This file defines a "generalized" additive Schwarz preconditioner for any Mat implementation.
  In this version each processor may intersect multiple subdomains and any subdomain may
  intersect multiple processors.  Intersections of subdomains with processors are called *local
  subdomains*.

       N    - total number of local subdomains on all processors  (set in PCGASMSetTotalSubdomains() or calculated in PCSetUp_GASM())
       n    - actual number of local subdomains on this processor (set in PCGASMSetSubdomains() or calculated in PCGASMSetTotalSubdomains())
       nmax - maximum number of local subdomains per processor    (calculated in PCGASMSetTotalSubdomains() or in PCSetUp_GASM())
*/
#include <petsc-private/pcimpl.h>     /*I "petscpc.h" I*/
#include <petscdm.h>

typedef struct {
  PetscInt   N,n,nmax;
  PetscInt   overlap;             /* overlap requested by user */
  KSP        *ksp;                /* linear solvers for each block */
  Vec        gx,gy;               /* Merged work vectors */
  Vec        *x,*y;               /* Split work vectors; storage aliases pieces of storage of the above merged vectors. */
  VecScatter gorestriction;       /* merged restriction to disjoint union of outer subdomains */
  VecScatter girestriction;       /* merged restriction to disjoint union of inner subdomains */
  IS         *ois;                /* index sets that define the outer (conceptually, overlapping) subdomains */
  IS         *iis;                /* index sets that define the inner (conceptually, nonoverlapping) subdomains */
  Mat        *pmat;               /* subdomain block matrices */
  PCGASMType type;                /* use reduced interpolation, restriction or both */
  PetscBool  create_local;           /* whether the autocreated subdomains are local or not */
  PetscBool  type_set;               /* if the user set this value (so it won't be changed for symmetric problems) */
  PetscBool  same_subdomain_solvers; /* flag indicating whether all local solvers are the same */
  PetscBool  sort_indices;           /* flag to sort subdomain indices */
  PetscBool  dm_subdomains;          /* whether the DM is allowed to define subdomains */
} PC_GASM;

#undef __FUNCT__
#define __FUNCT__ "PCGASMSubdomainView_Private"
static PetscErrorCode  PCGASMSubdomainView_Private(PC pc, PetscInt i, PetscViewer viewer)
{
  PC_GASM        *osm = (PC_GASM*)pc->data;
  PetscInt       j,nidx;
  const PetscInt *idx;
  PetscViewer    sviewer;
  char           *cidx;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (i < 0 || i >= osm->n) SETERRQ2(PetscObjectComm((PetscObject)viewer), PETSC_ERR_ARG_WRONG, "Invalid subdomain %D: must be nonnegative and less than %D", i, osm->n);
  /* Inner subdomains. */
  ierr = ISGetLocalSize(osm->iis[i], &nidx);CHKERRQ(ierr);
  /*
   No more than 15 characters per index plus a space.
   PetscViewerStringSPrintf() requires a string of size at least 2, so use (nidx+1) instead of nidx,
   in case nidx == 0. That will take care of the space for the trailing '\0' as well.
   For nidx == 0, the whole string consists of 16 '\0' characters.
   */
  ierr = PetscMalloc(sizeof(char)*(16*(nidx+1)+1), &cidx);CHKERRQ(ierr);
  ierr = PetscViewerStringOpen(PETSC_COMM_SELF, cidx, 16*(nidx+1)+1, &sviewer);CHKERRQ(ierr);
  ierr = ISGetIndices(osm->iis[i], &idx);CHKERRQ(ierr);
  for (j = 0; j < nidx; ++j) {
    ierr = PetscViewerStringSPrintf(sviewer, "%D ", idx[j]);CHKERRQ(ierr);
  }
  ierr = ISRestoreIndices(osm->iis[i],&idx);CHKERRQ(ierr);
  ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer, "Inner subdomain:\n");CHKERRQ(ierr);
  ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
  ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
  ierr = PetscViewerASCIISynchronizedPrintf(viewer, "%s", cidx);CHKERRQ(ierr);
  ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
  ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer, "\n");CHKERRQ(ierr);
  ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
  ierr = PetscFree(cidx);CHKERRQ(ierr);
  /* Outer subdomains. */
  ierr = ISGetLocalSize(osm->ois[i], &nidx);CHKERRQ(ierr);
  /*
   No more than 15 characters per index plus a space.
   PetscViewerStringSPrintf() requires a string of size at least 2, so use (nidx+1) instead of nidx,
   in case nidx == 0. That will take care of the space for the trailing '\0' as well.
   For nidx == 0, the whole string consists of 16 '\0' characters.
   */
  ierr = PetscMalloc(sizeof(char)*(16*(nidx+1)+1), &cidx);CHKERRQ(ierr);
  ierr = PetscViewerStringOpen(PETSC_COMM_SELF, cidx, 16*(nidx+1)+1, &sviewer);CHKERRQ(ierr);
  ierr = ISGetIndices(osm->ois[i], &idx);CHKERRQ(ierr);
  for (j = 0; j < nidx; ++j) {
    ierr = PetscViewerStringSPrintf(sviewer,"%D ", idx[j]);CHKERRQ(ierr);
  }
  ierr = ISRestoreIndices(osm->ois[i],&idx);CHKERRQ(ierr);
  ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer, "Outer subdomain:\n");CHKERRQ(ierr);
  ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
  ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
  ierr = PetscViewerASCIISynchronizedPrintf(viewer, "%s", cidx);CHKERRQ(ierr);
  ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
  ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer, "\n");CHKERRQ(ierr);
  ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
  ierr = PetscFree(cidx);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGASMPrintSubdomains"
static PetscErrorCode  PCGASMPrintSubdomains(PC pc)
{
  PC_GASM        *osm = (PC_GASM*)pc->data;
  const char     *prefix;
  char           fname[PETSC_MAX_PATH_LEN+1];
  PetscInt       i, l, d, count, gcount, *permutation, *numbering;
  PetscBool      found;
  PetscViewer    viewer, sviewer = NULL;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscMalloc2(osm->n, PetscInt, &permutation, osm->n, PetscInt, &numbering);CHKERRQ(ierr);
  for (i = 0; i < osm->n; ++i) permutation[i] = i;
  ierr = PetscObjectsGetGlobalNumbering(PetscObjectComm((PetscObject)pc), osm->n, (PetscObject*)osm->ois, &gcount, numbering);CHKERRQ(ierr);
  ierr = PetscSortIntWithPermutation(osm->n, numbering, permutation);CHKERRQ(ierr);
  ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
  ierr = PetscOptionsGetString(prefix,"-pc_gasm_print_subdomains",fname,PETSC_MAX_PATH_LEN,&found);CHKERRQ(ierr);
  if (!found) { ierr = PetscStrcpy(fname,"stdout");CHKERRQ(ierr); }
  ierr = PetscViewerASCIIOpen(PetscObjectComm((PetscObject)pc),fname,&viewer);CHKERRQ(ierr);
  /*
   Make sure the viewer has a name. Otherwise this may cause a deadlock or other weird errors when creating a subcomm viewer:
   the subcomm viewer will attempt to inherit the viewer's name, which, if not set, will be constructed collectively on the comm.
  */
  ierr = PetscObjectName((PetscObject)viewer);CHKERRQ(ierr);
  l    = 0;
  for (count = 0; count < gcount; ++count) {
    /* Now let subdomains go one at a time in the global numbering order and print their subdomain/solver info. */
    if (l<osm->n) {
      d = permutation[l]; /* d is the local number of the l-th smallest (in the global ordering) among the locally supported subdomains */
      if (numbering[d] == count) {
        ierr = PetscViewerGetSubcomm(viewer,((PetscObject)osm->ois[d])->comm, &sviewer);CHKERRQ(ierr);
        ierr = PCGASMSubdomainView_Private(pc,d,sviewer);CHKERRQ(ierr);
        ierr = PetscViewerRestoreSubcomm(viewer,((PetscObject)osm->ois[d])->comm, &sviewer);CHKERRQ(ierr);
        ++l;
      }
    }
    ierr = MPI_Barrier(PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
  }
  ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
  ierr = PetscFree2(permutation,numbering);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "PCView_GASM"
static PetscErrorCode PCView_GASM(PC pc,PetscViewer viewer)
{
  PC_GASM        *osm = (PC_GASM*)pc->data;
  const char     *prefix;
  PetscErrorCode ierr;
  PetscMPIInt    rank, size;
  PetscInt       i,bsz;
  PetscBool      iascii,view_subdomains=PETSC_FALSE;
  PetscViewer    sviewer;
  PetscInt       count, l, gcount, *numbering, *permutation;
  char           overlap[256]     = "user-defined overlap";
  char           gsubdomains[256] = "unknown total number of subdomains";
  char           lsubdomains[256] = "unknown number of local subdomains";
  char           msubdomains[256] = "unknown max number of local subdomains";

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank);CHKERRQ(ierr);
  ierr = PetscMalloc2(osm->n, PetscInt, &permutation, osm->n, PetscInt, &numbering);CHKERRQ(ierr);
  for (i = 0; i < osm->n; ++i) permutation[i] = i;
  ierr = PetscObjectsGetGlobalNumbering(PetscObjectComm((PetscObject)pc), osm->n, (PetscObject*)osm->ois, &gcount, numbering);CHKERRQ(ierr);
  ierr = PetscSortIntWithPermutation(osm->n, numbering, permutation);CHKERRQ(ierr);

  if (osm->overlap >= 0) {
    ierr = PetscSNPrintf(overlap,sizeof(overlap),"requested amount of overlap = %D",osm->overlap);CHKERRQ(ierr);
  }
  ierr = PetscSNPrintf(gsubdomains, sizeof(gsubdomains), "total number of subdomains = %D",gcount);CHKERRQ(ierr);
  if (osm->N > 0) {
    ierr = PetscSNPrintf(lsubdomains, sizeof(lsubdomains), "number of local subdomains = %D",osm->N);CHKERRQ(ierr);
  }
  if (osm->nmax > 0) {
    ierr = PetscSNPrintf(msubdomains,sizeof(msubdomains),"max number of local subdomains = %D",osm->nmax);CHKERRQ(ierr);
  }

  ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
  ierr = PetscOptionsGetBool(prefix,"-pc_gasm_view_subdomains",&view_subdomains,NULL);CHKERRQ(ierr);

  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    /*
     Make sure the viewer has a name. Otherwise this may cause a deadlock when creating a subcomm viewer:
     the subcomm viewer will attempt to inherit the viewer's name, which, if not set, will be constructed
     collectively on the comm.
     */
    ierr = PetscObjectName((PetscObject)viewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer,"Generalized additive Schwarz:\n");CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer,"Restriction/interpolation type: %s\n",PCGASMTypes[osm->type]);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer,"%s\n",overlap);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer,"%s\n",gsubdomains);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer,"%s\n",lsubdomains);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer,"%s\n",msubdomains);CHKERRQ(ierr);
    ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
    ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d:%d] number of locally-supported subdomains = %D\n",(int)rank,(int)size,osm->n);CHKERRQ(ierr);
    ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
    /* Cannot take advantage of osm->same_subdomain_solvers without a global numbering of subdomains. */
    ierr = PetscViewerASCIIPrintf(viewer,"Subdomain solver info is as follows:\n");CHKERRQ(ierr);
    ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
    /* Make sure that everybody waits for the banner to be printed. */
    ierr = MPI_Barrier(PetscObjectComm((PetscObject)viewer));CHKERRQ(ierr);
    /* Now let subdomains go one at a time in the global numbering order and print their subdomain/solver info. */
    l = 0;
    for (count = 0; count < gcount; ++count) {
      PetscMPIInt srank, ssize;
      if (l<osm->n) {
        PetscInt d = permutation[l]; /* d is the local number of the l-th smallest (in the global ordering) among the locally supported subdomains */
        if (numbering[d] == count) {
          ierr = MPI_Comm_size(((PetscObject)osm->ois[d])->comm, &ssize);CHKERRQ(ierr);
          ierr = MPI_Comm_rank(((PetscObject)osm->ois[d])->comm, &srank);CHKERRQ(ierr);
          ierr = PetscViewerGetSubcomm(viewer,((PetscObject)osm->ois[d])->comm, &sviewer);CHKERRQ(ierr);
          ierr = ISGetLocalSize(osm->ois[d],&bsz);CHKERRQ(ierr);
          ierr = PetscViewerASCIISynchronizedAllow(sviewer,PETSC_TRUE);CHKERRQ(ierr);
          ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"[%d:%d] (subcomm [%d:%d]) local subdomain number %D, local size = %D\n",(int)rank,(int)size,(int)srank,(int)ssize,d,bsz);CHKERRQ(ierr);
          ierr = PetscViewerFlush(sviewer);CHKERRQ(ierr);
          ierr = PetscViewerASCIISynchronizedAllow(sviewer,PETSC_FALSE);CHKERRQ(ierr);
          if (view_subdomains) {
            ierr = PCGASMSubdomainView_Private(pc,d,sviewer);CHKERRQ(ierr);
          }
          if (!pc->setupcalled) {
            ierr = PetscViewerASCIIPrintf(sviewer, "Solver not set up yet: PCSetUp() not yet called\n");CHKERRQ(ierr);
          } else {
            ierr = KSPView(osm->ksp[d],sviewer);CHKERRQ(ierr);
          }
          ierr = PetscViewerASCIIPrintf(sviewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
          ierr = PetscViewerFlush(sviewer);CHKERRQ(ierr);
          ierr = PetscViewerRestoreSubcomm(viewer,((PetscObject)osm->ois[d])->comm, &sviewer);CHKERRQ(ierr);
          ++l;
        }
      }
      ierr = MPI_Barrier(PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
    }
    ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
  }
  ierr = PetscFree2(permutation,numbering);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetUp_GASM"
static PetscErrorCode PCSetUp_GASM(PC pc)
{
  PC_GASM        *osm = (PC_GASM*)pc->data;
  PetscErrorCode ierr;
  PetscBool      symset,flg;
  PetscInt       i;
  PetscMPIInt    rank, size;
  MatReuse       scall = MAT_REUSE_MATRIX;
  KSP            ksp;
  PC             subpc;
  const char     *prefix,*pprefix;
  Vec            x,y;
  PetscInt       oni;       /* Number of indices in the i-th local outer subdomain.               */
  const PetscInt *oidxi;    /* Indices from the i-th local outer subdomain.                       */
  PetscInt       on;        /* Number of indices in the disjoint union of local outer subdomains. */
  PetscInt       *oidx;     /* Indices in the disjoint union of local outer subdomains. */
  IS             gois;      /* Disjoint union of the global indices of outer subdomains.          */
  IS             goid;      /* Identity IS of the size of the disjoint union of outer subdomains. */
  PetscScalar    *gxarray, *gyarray;
  PetscInt       gofirst;   /* Start of locally-owned indices in the vectors -- osm->gx,osm->gy -- over the disjoint union of outer subdomains. */
  DM             *subdomain_dm = NULL;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
  if (!pc->setupcalled) {

    if (!osm->type_set) {
      ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
      if (symset && flg) osm->type = PC_GASM_BASIC;
    }

    /*
     If subdomains have been set, then the local number of subdomains, osm->n, is NOT PETSC_DECIDE and is at least 1.
     The total number of subdomains, osm->N, is not necessarily set, might be PETSC_DECIDE, and then will have to be calculated from osm->n.
     */
    if (osm->n == PETSC_DECIDE) {
      /* no subdomains given */
      /* try pc->dm first, if allowed */
      if (osm->dm_subdomains && pc->dm) {
        PetscInt  num_subdomains, d;
        char      **subdomain_names;
        IS        *inner_subdomain_is, *outer_subdomain_is;
        ierr = DMCreateDomainDecomposition(pc->dm, &num_subdomains, &subdomain_names, &inner_subdomain_is, &outer_subdomain_is, &subdomain_dm);CHKERRQ(ierr);
        if (num_subdomains) {
          ierr = PCGASMSetSubdomains(pc, num_subdomains, inner_subdomain_is, outer_subdomain_is);CHKERRQ(ierr);
        }
        for (d = 0; d < num_subdomains; ++d) {
          if (subdomain_names)    {ierr = PetscFree(subdomain_names[d]);CHKERRQ(ierr);}
          if (inner_subdomain_is) {ierr = ISDestroy(&inner_subdomain_is[d]);CHKERRQ(ierr);}
          if (outer_subdomain_is) {ierr = ISDestroy(&outer_subdomain_is[d]);CHKERRQ(ierr);}
        }
        ierr = PetscFree(subdomain_names);CHKERRQ(ierr);
        ierr = PetscFree(inner_subdomain_is);CHKERRQ(ierr);
        ierr = PetscFree(outer_subdomain_is);CHKERRQ(ierr);
      }
      if (osm->n == PETSC_DECIDE) { /* still no subdomains; use one per processor */
        osm->nmax = osm->n = 1;
        ierr      = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);
        osm->N    = size;
      }
    }
    if (!osm->iis) {
      /*
       The local number of subdomains was set just above, or in PCGASMSetTotalSubdomains(), or in PCGASMSetSubdomains(),
       but the actual subdomains have not been supplied (in PCGASMSetSubdomains()).
       We create the requisite number of inner subdomains on PETSC_COMM_SELF (for now).
       */
      ierr = PCGASMCreateLocalSubdomains(pc->pmat,osm->overlap,osm->n,&osm->iis,&osm->ois);CHKERRQ(ierr);
    }
    if (osm->N == PETSC_DECIDE) {
      struct {PetscInt max,sum;} inwork,outwork;
      /* determine global number of subdomains and the max number of local subdomains */
      inwork.max = osm->n;
      inwork.sum = osm->n;
      ierr       = MPI_Allreduce(&inwork,&outwork,1,MPIU_2INT,PetscMaxSum_Op,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
      osm->nmax  = outwork.max;
      osm->N     = outwork.sum;
    }

    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    flg  = PETSC_FALSE;
    ierr = PetscOptionsGetBool(prefix,"-pc_gasm_print_subdomains",&flg,NULL);CHKERRQ(ierr);
    if (flg) { ierr = PCGASMPrintSubdomains(pc);CHKERRQ(ierr); }

    if (osm->sort_indices) {
      for (i=0; i<osm->n; i++) {
        ierr = ISSort(osm->ois[i]);CHKERRQ(ierr);
        ierr = ISSort(osm->iis[i]);CHKERRQ(ierr);
      }
    }
    /*
     Merge the ISs, create merged vectors and restrictions.
     */
    /* Merge outer subdomain ISs and construct a restriction onto the disjoint union of local outer subdomains. */
    on = 0;
    for (i=0; i<osm->n; i++) {
      ierr = ISGetLocalSize(osm->ois[i],&oni);CHKERRQ(ierr);
      on  += oni;
    }
    ierr = PetscMalloc(on*sizeof(PetscInt), &oidx);CHKERRQ(ierr);
    on   = 0;
    for (i=0; i<osm->n; i++) {
      ierr = ISGetLocalSize(osm->ois[i],&oni);CHKERRQ(ierr);
      ierr = ISGetIndices(osm->ois[i],&oidxi);CHKERRQ(ierr);
      ierr = PetscMemcpy(oidx+on, oidxi, sizeof(PetscInt)*oni);CHKERRQ(ierr);
      ierr = ISRestoreIndices(osm->ois[i], &oidxi);CHKERRQ(ierr);
      on  += oni;
    }
    ierr = ISCreateGeneral(((PetscObject)(pc))->comm, on, oidx, PETSC_OWN_POINTER, &gois);CHKERRQ(ierr);
    ierr = MatGetVecs(pc->pmat,&x,&y);CHKERRQ(ierr);
    ierr = VecCreateMPI(PetscObjectComm((PetscObject)pc), on, PETSC_DECIDE, &osm->gx);CHKERRQ(ierr);
    ierr = VecDuplicate(osm->gx,&osm->gy);CHKERRQ(ierr);
    ierr = VecGetOwnershipRange(osm->gx, &gofirst, NULL);CHKERRQ(ierr);
    ierr = ISCreateStride(PetscObjectComm((PetscObject)pc),on,gofirst,1, &goid);CHKERRQ(ierr);
    ierr = VecScatterCreate(x,gois,osm->gx,goid, &(osm->gorestriction));CHKERRQ(ierr);
    ierr = VecDestroy(&x);CHKERRQ(ierr);
    ierr = ISDestroy(&gois);CHKERRQ(ierr);

    /* Merge inner subdomain ISs and construct a restriction onto the disjoint union of local inner subdomains. */
    {
      PetscInt ini;           /* Number of indices in the i-th local inner subdomain. */
      PetscInt in;            /* Number of indices in the disjoint union of local inner subdomains. */
      PetscInt *iidx;         /* Global indices in the merged local inner subdomain. */
      PetscInt *ioidx;        /* Global indices of the disjoint union of inner subdomains within the disjoint union of outer subdomains. */
      IS       giis;          /* IS for the disjoint union of inner subdomains. */
      IS       giois;         /* IS for the disjoint union of inner subdomains within the disjoint union of outer subdomains. */

      in = 0;
      for (i=0; i<osm->n; i++) {
        ierr = ISGetLocalSize(osm->iis[i],&ini);CHKERRQ(ierr);
        in  += ini;
      }
      ierr = PetscMalloc(in*sizeof(PetscInt), &iidx);CHKERRQ(ierr);
      ierr = PetscMalloc(in*sizeof(PetscInt), &ioidx);CHKERRQ(ierr);
      ierr = VecGetOwnershipRange(osm->gx,&gofirst, NULL);CHKERRQ(ierr);
      in   = 0;
      on   = 0;
      for (i=0; i<osm->n; i++) {
        const PetscInt         *iidxi; /* Global indices of the i-th local inner subdomain. */
        ISLocalToGlobalMapping ltogi; /* Map from global to local indices of the i-th local outer subdomain. */
        PetscInt               *ioidxi; /* Local indices of the i-th local inner subdomain within the local outer subdomain. */
        PetscInt               ioni;  /* Number of indices in ioidxi; if ioni != ini the inner subdomain is not a subdomain of the outer subdomain (error). */
        PetscInt               k;
        ierr   = ISGetLocalSize(osm->iis[i],&ini);CHKERRQ(ierr);
        ierr   = ISGetLocalSize(osm->ois[i],&oni);CHKERRQ(ierr);
        ierr   = ISGetIndices(osm->iis[i],&iidxi);CHKERRQ(ierr);
        ierr   = PetscMemcpy(iidx+in, iidxi, sizeof(PetscInt)*ini);CHKERRQ(ierr);
        ierr   = ISLocalToGlobalMappingCreateIS(osm->ois[i],&ltogi);CHKERRQ(ierr);
        ioidxi = ioidx+in;
        ierr   = ISGlobalToLocalMappingApply(ltogi,IS_GTOLM_DROP,ini,iidxi,&ioni,ioidxi);CHKERRQ(ierr);
        ierr   = ISLocalToGlobalMappingDestroy(&ltogi);CHKERRQ(ierr);
        ierr   = ISRestoreIndices(osm->iis[i], &iidxi);CHKERRQ(ierr);
        if (ioni != ini) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Inner subdomain %D contains %D indices outside of its outer subdomain", i, ini - ioni);
        for (k = 0; k < ini; ++k) ioidxi[k] += gofirst+on;
        in += ini;
        on += oni;
      }
      ierr = ISCreateGeneral(PetscObjectComm((PetscObject)pc), in, iidx,  PETSC_OWN_POINTER, &giis);CHKERRQ(ierr);
      ierr = ISCreateGeneral(PetscObjectComm((PetscObject)pc), in, ioidx, PETSC_OWN_POINTER, &giois);CHKERRQ(ierr);
      ierr = VecScatterCreate(y,giis,osm->gy,giois,&osm->girestriction);CHKERRQ(ierr);
      ierr = VecDestroy(&y);CHKERRQ(ierr);
      ierr = ISDestroy(&giis);CHKERRQ(ierr);
      ierr = ISDestroy(&giois);CHKERRQ(ierr);
    }
    ierr = ISDestroy(&goid);CHKERRQ(ierr);

    /* Create the subdomain work vectors. */
    ierr = PetscMalloc(osm->n*sizeof(Vec),&osm->x);CHKERRQ(ierr);
    ierr = PetscMalloc(osm->n*sizeof(Vec),&osm->y);CHKERRQ(ierr);
    ierr = VecGetArray(osm->gx, &gxarray);CHKERRQ(ierr);
    ierr = VecGetArray(osm->gy, &gyarray);CHKERRQ(ierr);
    for (i=0, on=0; i<osm->n; ++i, on += oni) {
      PetscInt oNi;
      ierr = ISGetLocalSize(osm->ois[i],&oni);CHKERRQ(ierr);
      ierr = ISGetSize(osm->ois[i],&oNi);CHKERRQ(ierr);
      ierr = VecCreateMPIWithArray(((PetscObject)(osm->ois[i]))->comm,1,oni,oNi,gxarray+on,&osm->x[i]);CHKERRQ(ierr);
      ierr = VecCreateMPIWithArray(((PetscObject)(osm->ois[i]))->comm,1,oni,oNi,gyarray+on,&osm->y[i]);CHKERRQ(ierr);
    }
    ierr = VecRestoreArray(osm->gx, &gxarray);CHKERRQ(ierr);
    ierr = VecRestoreArray(osm->gy, &gyarray);CHKERRQ(ierr);
    /* Create the local solvers */
    ierr = PetscMalloc(osm->n*sizeof(KSP),&osm->ksp);CHKERRQ(ierr);
    for (i=0; i<osm->n; i++) { /* KSPs are local */
      ierr        = KSPCreate(((PetscObject)(osm->ois[i]))->comm,&ksp);CHKERRQ(ierr);
      ierr        = PetscLogObjectParent(pc,ksp);CHKERRQ(ierr);
      ierr        = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
      ierr        = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
      ierr        = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
      ierr        = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
      ierr        = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
      ierr        = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
      osm->ksp[i] = ksp;
    }
    scall = MAT_INITIAL_MATRIX;

  } else { /* if (!pc->setupcalled) */
    /*
       Destroy the blocks from the previous iteration
    */
    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      ierr  = MatDestroyMatrices(osm->n,&osm->pmat);CHKERRQ(ierr);
      scall = MAT_INITIAL_MATRIX;
    }
  }

  /*
     Extract out the submatrices.
  */
  if (size > 1) {
    ierr = MatGetSubMatricesParallel(pc->pmat,osm->n,osm->ois, osm->ois,scall,&osm->pmat);CHKERRQ(ierr);
  } else {
    ierr = MatGetSubMatrices(pc->pmat,osm->n,osm->ois, osm->ois,scall,&osm->pmat);CHKERRQ(ierr);
  }
  if (scall == MAT_INITIAL_MATRIX) {
    ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
    for (i=0; i<osm->n; i++) {
      ierr = PetscLogObjectParent(pc,osm->pmat[i]);CHKERRQ(ierr);
      ierr = PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);CHKERRQ(ierr);
    }
  }

  /* Return control to the user so that the submatrices can be modified (e.g., to apply
     different boundary conditions for the submatrices than for the global problem) */
  ierr = PCModifySubMatrices(pc,osm->n,osm->ois,osm->ois,osm->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);

  /*
     Loop over the submatrices, putting them into the local KSPs
  */
  for (i=0; i<osm->n; i++) {
    ierr = KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i],pc->flag);CHKERRQ(ierr);
    if (!pc->setupcalled) {
      ierr = KSPSetFromOptions(osm->ksp[i]);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetUpOnBlocks_GASM"
static PetscErrorCode PCSetUpOnBlocks_GASM(PC pc)
{
  PC_GASM        *osm = (PC_GASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  for (i=0; i<osm->n; i++) {
    ierr = KSPSetUp(osm->ksp[i]);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApply_GASM"
static PetscErrorCode PCApply_GASM(PC pc,Vec x,Vec y)
{
  PC_GASM        *osm = (PC_GASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation only to the inner
     subdomain values (leaving the other values 0).
  */
  if (!(osm->type & PC_GASM_RESTRICT)) {
    /* have to zero the work RHS since scatter may leave some slots empty */
    ierr = VecZeroEntries(osm->gx);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->girestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
  } else {
    ierr = VecScatterBegin(osm->gorestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
  }
  ierr = VecZeroEntries(osm->gy);CHKERRQ(ierr);
  if (!(osm->type & PC_GASM_RESTRICT)) {
    ierr = VecScatterEnd(osm->girestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
  } else {
    ierr = VecScatterEnd(osm->gorestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
  }
  /* do the subdomain solves */
  for (i=0; i<osm->n; ++i) {
    ierr = KSPSolve(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
  }
  ierr = VecZeroEntries(y);CHKERRQ(ierr);
  if (!(osm->type & PC_GASM_INTERPOLATE)) {
    ierr = VecScatterBegin(osm->girestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
    ierr = VecScatterEnd(osm->girestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
  } else {
    ierr = VecScatterBegin(osm->gorestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
    ierr = VecScatterEnd(osm->gorestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApplyTranspose_GASM"
static PetscErrorCode PCApplyTranspose_GASM(PC pc,Vec x,Vec y)
{
  PC_GASM        *osm = (PC_GASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).

     Note: these are reversed from PCApply_GASM() because we are applying the
     transpose of the three terms.
  */
  if (!(osm->type & PC_GASM_INTERPOLATE)) {
    /* have to zero the work RHS since scatter may leave some slots empty */
    ierr = VecZeroEntries(osm->gx);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->girestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
  } else {
    ierr = VecScatterBegin(osm->gorestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
  }
  ierr = VecZeroEntries(osm->gy);CHKERRQ(ierr);
  if (!(osm->type & PC_GASM_INTERPOLATE)) {
    ierr = VecScatterEnd(osm->girestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
  } else {
    ierr = VecScatterEnd(osm->gorestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
  }
  /* do the local solves */
  for (i=0; i<osm->n; ++i) { /* Note that the solves are local, so we can go to osm->n, rather than osm->nmax. */
    ierr = KSPSolveTranspose(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
  }
  ierr = VecZeroEntries(y);CHKERRQ(ierr);
  if (!(osm->type & PC_GASM_RESTRICT)) {
    ierr = VecScatterBegin(osm->girestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
    ierr = VecScatterEnd(osm->girestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
  } else {
    ierr = VecScatterBegin(osm->gorestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
    ierr = VecScatterEnd(osm->gorestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCReset_GASM"
static PetscErrorCode PCReset_GASM(PC pc)
{
  PC_GASM        *osm = (PC_GASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  if (osm->ksp) {
    for (i=0; i<osm->n; i++) {
      ierr = KSPReset(osm->ksp[i]);CHKERRQ(ierr);
    }
  }
  if (osm->pmat) {
    if (osm->n > 0) {
      ierr = MatDestroyMatrices(osm->n,&osm->pmat);CHKERRQ(ierr);
    }
  }
  if (osm->x) {
    for (i=0; i<osm->n; i++) {
      ierr = VecDestroy(&osm->x[i]);CHKERRQ(ierr);
      ierr = VecDestroy(&osm->y[i]);CHKERRQ(ierr);
    }
  }
  ierr = VecDestroy(&osm->gx);CHKERRQ(ierr);
  ierr = VecDestroy(&osm->gy);CHKERRQ(ierr);

  ierr     = VecScatterDestroy(&osm->gorestriction);CHKERRQ(ierr);
  ierr     = VecScatterDestroy(&osm->girestriction);CHKERRQ(ierr);
  ierr     = PCGASMDestroySubdomains(osm->n,osm->ois,osm->iis);CHKERRQ(ierr);
  osm->ois = 0;
  osm->iis = 0;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCDestroy_GASM"
static PetscErrorCode PCDestroy_GASM(PC pc)
{
  PC_GASM        *osm = (PC_GASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  ierr = PCReset_GASM(pc);CHKERRQ(ierr);
  if (osm->ksp) {
    for (i=0; i<osm->n; i++) {
      ierr = KSPDestroy(&osm->ksp[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree(osm->ksp);CHKERRQ(ierr);
  }
  ierr = PetscFree(osm->x);CHKERRQ(ierr);
  ierr = PetscFree(osm->y);CHKERRQ(ierr);
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetFromOptions_GASM"
static PetscErrorCode PCSetFromOptions_GASM(PC pc)
{
  PC_GASM        *osm = (PC_GASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       blocks,ovl;
  PetscBool      symset,flg;
  PCGASMType     gasmtype;

  PetscFunctionBegin;
  /* set the type to symmetric if the matrix is symmetric */
  if (!osm->type_set && pc->pmat) {
    ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
    if (symset && flg) osm->type = PC_GASM_BASIC;
  }
  ierr = PetscOptionsHead("Generalized additive Schwarz options");CHKERRQ(ierr);
  ierr = PetscOptionsBool("-pc_gasm_dm_subdomains","Use DMCreateDomainDecomposition() to define subdomains","PCGASMSetDMSubdomains",osm->dm_subdomains,&osm->dm_subdomains,&flg);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-pc_gasm_total_subdomains","Total number of subdomains across communicator","PCGASMSetTotalSubdomains",osm->n,&blocks,&flg);CHKERRQ(ierr);
  if (flg) {
    osm->create_local = PETSC_TRUE;
    ierr = PetscOptionsBool("-pc_gasm_subdomains_create_local","Whether to make autocreated subdomains local (true by default)","PCGASMSetTotalSubdomains",osm->create_local,&osm->create_local,NULL);CHKERRQ(ierr);
    ierr = PCGASMSetTotalSubdomains(pc,blocks,osm->create_local);CHKERRQ(ierr);
    osm->dm_subdomains = PETSC_FALSE;
  }
  ierr = PetscOptionsInt("-pc_gasm_overlap","Number of overlapping degrees of freedom","PCGASMSetOverlap",osm->overlap,&ovl,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PCGASMSetOverlap(pc,ovl);CHKERRQ(ierr);
    osm->dm_subdomains = PETSC_FALSE;
  }
  flg  = PETSC_FALSE;
  ierr = PetscOptionsEnum("-pc_gasm_type","Type of restriction/extension","PCGASMSetType",PCGASMTypes,(PetscEnum)osm->type,(PetscEnum*)&gasmtype,&flg);CHKERRQ(ierr);
  if (flg) {ierr = PCGASMSetType(pc,gasmtype);CHKERRQ(ierr);}
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*------------------------------------------------------------------------------------*/

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCGASMSetSubdomains_GASM"
PetscErrorCode  PCGASMSetSubdomains_GASM(PC pc,PetscInt n,IS iis[],IS ois[])
{
  PC_GASM        *osm = (PC_GASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 1 or more subdomains, n = %D",n);
  if (pc->setupcalled && (n != osm->n || iis || ois)) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCGASMSetSubdomains() should be called before calling PCSetUp().");

  if (!pc->setupcalled) {
    osm->n   = n;
    osm->ois = 0;
    osm->iis = 0;
    if (ois) {
      for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)ois[i]);CHKERRQ(ierr);}
    }
    if (iis) {
      for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)iis[i]);CHKERRQ(ierr);}
    }
    ierr = PCGASMDestroySubdomains(osm->n,osm->iis,osm->ois);CHKERRQ(ierr);
    if (ois) {
      ierr = PetscMalloc(n*sizeof(IS),&osm->ois);CHKERRQ(ierr);
      for (i=0; i<n; i++) osm->ois[i] = ois[i];
      /* Flag indicating that the user has set outer subdomains, so PCGASM should not increase their size. */
      osm->overlap = -1;
      if (!iis) {
        ierr = PetscMalloc(n*sizeof(IS),&osm->iis);CHKERRQ(ierr);
        for (i=0; i<n; i++) {
          ierr = PetscObjectReference((PetscObject)ois[i]);CHKERRQ(ierr);
          osm->iis[i] = ois[i];
        }
      }
    }
    if (iis) {
      ierr = PetscMalloc(n*sizeof(IS),&osm->iis);CHKERRQ(ierr);
      for (i=0; i<n; i++) osm->iis[i] = iis[i];
      if (!ois) {
        ierr = PetscMalloc(n*sizeof(IS),&osm->ois);CHKERRQ(ierr);
        for (i=0; i<n; i++) {
          ierr = PetscObjectReference((PetscObject)iis[i]);CHKERRQ(ierr);
          osm->ois[i] = iis[i];
        }
        if (osm->overlap > 0) {
          /* Extend the "overlapping" regions by a number of steps */
          ierr = MatIncreaseOverlap(pc->pmat,osm->n,osm->ois,osm->overlap);CHKERRQ(ierr);
        }
      }
    }
  }
  PetscFunctionReturn(0);
}
EXTERN_C_END

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCGASMSetTotalSubdomains_GASM"
PetscErrorCode  PCGASMSetTotalSubdomains_GASM(PC pc,PetscInt N, PetscBool create_local)
{
  PC_GASM        *osm = (PC_GASM*)pc->data;
  PetscErrorCode ierr;
  PetscMPIInt    rank,size;
  PetscInt       n;
  PetscInt       Nmin, Nmax;

  PetscFunctionBegin;
  if (!create_local) SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "No support for autocreation of nonlocal subdomains.");
  if (N < 1) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Total number of subdomains must be > 0, N = %D",N);
  ierr = MPI_Allreduce(&N,&Nmin,1,MPIU_INT,MPIU_MIN,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
  ierr = MPI_Allreduce(&N,&Nmax,1,MPIU_INT,MPIU_MAX,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
  if (Nmin != Nmax) SETERRQ2(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "All processors must use the same number of subdomains.  min(N) = %D != %D = max(N)", Nmin, Nmax);

  osm->create_local = create_local;
  /*
     Split the subdomains equally among all processors
  */
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);
  n    = N/size + ((N % size) > rank);
  if (!n) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Process %d must have at least one subdomain: total processors %d total blocks %D",(int)rank,(int)size,N);
  if (pc->setupcalled && n != osm->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"PCGASMSetTotalSubdomains() should be called before PCSetUp().");
  if (!pc->setupcalled) {
    ierr      = PCGASMDestroySubdomains(osm->n,osm->iis,osm->ois);CHKERRQ(ierr);
    osm->N    = N;
    osm->n    = n;
    osm->nmax = N/size + ((N%size) ? 1 : 0);
    osm->ois  = 0;
    osm->iis  = 0;
  }
  PetscFunctionReturn(0);
}
EXTERN_C_END

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCGASMSetOverlap_GASM"
PetscErrorCode  PCGASMSetOverlap_GASM(PC pc,PetscInt ovl)
{
  PC_GASM *osm = (PC_GASM*)pc->data;

  PetscFunctionBegin;
  if (ovl < 0) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
  if (pc->setupcalled && ovl != osm->overlap) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCGASMSetOverlap() should be called before PCSetUp().");
  if (!pc->setupcalled) osm->overlap = ovl;
  PetscFunctionReturn(0);
}
EXTERN_C_END

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCGASMSetType_GASM"
PetscErrorCode  PCGASMSetType_GASM(PC pc,PCGASMType type)
{
  PC_GASM *osm = (PC_GASM*)pc->data;

  PetscFunctionBegin;
  osm->type     = type;
  osm->type_set = PETSC_TRUE;
  PetscFunctionReturn(0);
}
EXTERN_C_END

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCGASMSetSortIndices_GASM"
PetscErrorCode  PCGASMSetSortIndices_GASM(PC pc,PetscBool doSort)
{
  PC_GASM *osm = (PC_GASM*)pc->data;

  PetscFunctionBegin;
  osm->sort_indices = doSort;
  PetscFunctionReturn(0);
}
EXTERN_C_END

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCGASMGetSubKSP_GASM"
/*
   FIX: This routine might need to be modified once multiple ranks per subdomain are allowed.
        In particular, it would upset the global subdomain number calculation.
*/
PetscErrorCode  PCGASMGetSubKSP_GASM(PC pc,PetscInt *n,PetscInt *first,KSP **ksp)
{
  PC_GASM        *osm = (PC_GASM*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (osm->n < 1) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ORDER,"Need to call PCSetUp() on PC (or KSPSetUp() on the outer KSP object) before calling here");

  if (n) *n = osm->n;
  if (first) {
    ierr    = MPI_Scan(&osm->n,first,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
    *first -= osm->n;
  }
  if (ksp) {
    /* Assume that the local solves are now different; this is not necessarily
       true, though!  This flag is used only for PCView_GASM(). */
    *ksp                        = osm->ksp;
    osm->same_subdomain_solvers = PETSC_FALSE;
  }
  PetscFunctionReturn(0);
} /* PCGASMGetSubKSP_GASM() */
EXTERN_C_END


#undef __FUNCT__
#define __FUNCT__ "PCGASMSetSubdomains"
/*@C
    PCGASMSetSubdomains - Sets the subdomains for this processor
    for the additive Schwarz preconditioner.

    Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
.   n   - the number of subdomains for this processor
.   iis - the index sets that define this processor's local inner subdomains
         (or NULL for PETSc to determine subdomains)
-   ois - the index sets that define this processor's local outer subdomains
         (or NULL to use the same as iis)

    Notes:
    The IS indices use the parallel, global numbering of the vector entries.
    Inner subdomains are those where the correction is applied.
    Outer subdomains are those where the residual necessary to obtain the
    corrections is obtained (see PCGASMType for the use of inner/outer subdomains).
    Both inner and outer subdomains can extend over several processors.
    This processor's portion of a subdomain is known as a local subdomain.

    By default the GASM preconditioner uses 1 (local) subdomain per processor.
    Use PCGASMSetTotalSubdomains() to set the total number of subdomains across
    all processors that PCGASM will create automatically, and to specify whether
    they should be local or not.

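    Example usage (a minimal sketch; the index sets inner_is[] and outer_is[] are hypothetical and would be built by the caller, e.g. with ISCreateGeneral()):
.vb
      IS inner_is[2], outer_is[2];
      /* ... fill inner_is[] and outer_is[] with the global indices of two local subdomains ... */
      ierr = PCSetType(pc,PCGASM);CHKERRQ(ierr);
      ierr = PCGASMSetSubdomains(pc,2,inner_is,outer_is);CHKERRQ(ierr);
.ve
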
    Level: advanced

.keywords: PC, GASM, set, subdomains, additive Schwarz

.seealso: PCGASMSetTotalSubdomains(), PCGASMSetOverlap(), PCGASMGetSubKSP(),
          PCGASMCreateSubdomains2D(), PCGASMGetSubdomains()
@*/
PetscErrorCode  PCGASMSetSubdomains(PC pc,PetscInt n,IS iis[],IS ois[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGASMSetSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,n,iis,ois));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGASMSetTotalSubdomains"
/*@C
    PCGASMSetTotalSubdomains - Sets the total number of subdomains to use in the generalized additive
    Schwarz preconditioner.  The number of subdomains is cumulative across all processors in pc's
    communicator. Either all or no processors in the PC communicator must call this routine with
    the same N.  The subdomains will be created automatically during PCSetUp().

    Collective on PC

    Input Parameters:
+   pc           - the preconditioner context
.   N            - the total number of subdomains cumulative across all processors
-   create_local - whether the subdomains to be created are to be local

    Options Database Keys:
    To set the total number of subdomains and let PCGASM autocreate them, rather than specify the index sets, use the following options:
+    -pc_gasm_total_subdomains <n>                  - sets the total number of subdomains to be autocreated by PCGASM
-    -pc_gasm_subdomains_create_local <true|false>  - whether autocreated subdomains should be local or not (default is true)

    Notes:
    By default the GASM preconditioner uses 1 subdomain per processor.

    Use PCGASMSetSubdomains() to set subdomains explicitly or to set different numbers
    of subdomains per processor.

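    Example usage (a sketch; pc is assumed to already be of type PCGASM):
.vb
      ierr = PCGASMSetTotalSubdomains(pc,16,PETSC_TRUE);CHKERRQ(ierr); /* 16 subdomains split across the communicator */
.ve
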
    Level: advanced

.keywords: PC, GASM, set, total, global, subdomains, additive Schwarz

.seealso: PCGASMSetSubdomains(), PCGASMSetOverlap(), PCGASMGetSubKSP(),
          PCGASMCreateSubdomains2D()
@*/
PetscErrorCode  PCGASMSetTotalSubdomains(PC pc,PetscInt N, PetscBool create_local)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGASMSetTotalSubdomains_C",(PC,PetscInt,PetscBool),(pc,N,create_local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGASMSetOverlap"
/*@
    PCGASMSetOverlap - Sets the overlap between a pair of subdomains for the
    additive Schwarz preconditioner.  Either all or no processors in the
    PC communicator must call this routine.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)

    Options Database Key:
.   -pc_gasm_overlap <overlap> - Sets overlap

    Notes:
    By default the GASM preconditioner uses 1 subdomain per processor.  To use
    multiple subdomains per processor, see PCGASMSetTotalSubdomains() or
    PCGASMSetSubdomains() (and the option -pc_gasm_total_subdomains <n>).

    The overlap defaults to 1, so if one desires that no additional
    overlap be computed beyond what may have been set with a call to
    PCGASMSetTotalSubdomains() or PCGASMSetSubdomains(), then ovl
    must be set to be 0.  In particular, if one does not explicitly set
    the subdomains in application code, then all overlap would be computed
    internally by PETSc, and using an overlap of 0 would result in a GASM
    variant that is equivalent to the block Jacobi preconditioner.

    Note that one can define initial index sets with any overlap via
    PCGASMSetSubdomains(); the routine PCGASMSetOverlap() merely allows
    PETSc to extend that overlap further, if desired.

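    Example usage (a sketch; equivalent to running with -pc_gasm_overlap 2):
.vb
      ierr = PCGASMSetOverlap(pc,2);CHKERRQ(ierr);
.ve
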
    Level: intermediate

.keywords: PC, GASM, set, overlap

.seealso: PCGASMSetTotalSubdomains(), PCGASMSetSubdomains(), PCGASMGetSubKSP(),
          PCGASMCreateSubdomains2D(), PCGASMGetSubdomains()
@*/
PetscErrorCode  PCGASMSetOverlap(PC pc,PetscInt ovl)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveInt(pc,ovl,2);
  ierr = PetscTryMethod(pc,"PCGASMSetOverlap_C",(PC,PetscInt),(pc,ovl));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGASMSetType"
/*@
    PCGASMSetType - Sets the type of restriction and interpolation used
    for local problems in the additive Schwarz method.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   type - variant of GASM, one of
.vb
      PC_GASM_BASIC       - full interpolation and restriction
      PC_GASM_RESTRICT    - full restriction, local processor interpolation
      PC_GASM_INTERPOLATE - full interpolation, local processor restriction
      PC_GASM_NONE        - local processor restriction and interpolation
.ve

    Options Database Key:
.   -pc_gasm_type [basic,restrict,interpolate,none] - Sets GASM type

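    Example usage (a sketch; equivalent to the option -pc_gasm_type basic):
.vb
      ierr = PCGASMSetType(pc,PC_GASM_BASIC);CHKERRQ(ierr);
.ve
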
    Level: intermediate

.keywords: PC, GASM, set, type

.seealso: PCGASMSetTotalSubdomains(), PCGASMSetSubdomains(), PCGASMGetSubKSP(),
          PCGASMCreateSubdomains2D()
@*/
PetscErrorCode  PCGASMSetType(PC pc,PCGASMType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveEnum(pc,type,2);
  ierr = PetscTryMethod(pc,"PCGASMSetType_C",(PC,PCGASMType),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGASMSetSortIndices"
/*@
    PCGASMSetSortIndices - Determines whether subdomain indices are sorted.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   doSort - sort the subdomain indices

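    Example usage (a sketch; disables index sorting):
.vb
      ierr = PCGASMSetSortIndices(pc,PETSC_FALSE);CHKERRQ(ierr);
.ve
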
    Level: intermediate

.keywords: PC, GASM, set, type

.seealso: PCGASMSetSubdomains(), PCGASMSetTotalSubdomains(), PCGASMGetSubKSP(),
          PCGASMCreateSubdomains2D()
@*/
PetscErrorCode  PCGASMSetSortIndices(PC pc,PetscBool doSort)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveBool(pc,doSort,2);
  ierr = PetscTryMethod(pc,"PCGASMSetSortIndices_C",(PC,PetscBool),(pc,doSort));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGASMGetSubKSP"
/*@C
   PCGASMGetSubKSP - Gets the local KSP contexts for all blocks on
   this processor.

   Collective on PC iff first_local is requested

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  n_local - the number of blocks on this processor or NULL
.  first_local - the global number of the first block on this processor or NULL,
                 all processors must request or all must pass NULL
-  ksp - the array of KSP contexts

   Notes:
   After PCGASMGetSubKSP() the array of KSPs is not to be freed.

   Currently for some matrix implementations only 1 block per processor
   is supported.

   You must call KSPSetUp() before calling PCGASMGetSubKSP().

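   Example usage (a sketch that tightens the tolerance of every local block solver; ksp is assumed to be the outer solver whose PC is of type PCGASM):
.vb
      KSP      *subksp;
      PetscInt i,nlocal;
      ierr = KSPSetUp(ksp);CHKERRQ(ierr);
      ierr = PCGASMGetSubKSP(pc,&nlocal,NULL,&subksp);CHKERRQ(ierr);
      for (i=0; i<nlocal; i++) {
        ierr = KSPSetTolerances(subksp[i],1.e-8,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT);CHKERRQ(ierr);
      }
.ve
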
   Level: advanced

.keywords: PC, GASM, additive Schwarz, get, sub, KSP, context

.seealso: PCGASMSetTotalSubdomains(), PCGASMSetSubdomains(), PCGASMSetOverlap(),
          PCGASMCreateSubdomains2D()
@*/
PetscErrorCode  PCGASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscUseMethod(pc,"PCGASMGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------------------*/
/*MC
   PCGASM - Use the (restricted) additive Schwarz method, where each block is (approximately) solved with
           its own KSP object.

   Options Database Keys:
+  -pc_gasm_total_subdomains <n>  - Sets the total number of local subdomains (known as blocks) to be distributed among processors
.  -pc_gasm_view_subdomains       - activates the printing of subdomain indices in PCView(), -ksp_view or -snes_view
.  -pc_gasm_print_subdomains      - activates the printing of subdomain indices in PCSetUp()
.  -pc_gasm_overlap <ovl>         - Sets overlap by which to (automatically) extend local subdomains
-  -pc_gasm_type [basic,restrict,interpolate,none] - Sets GASM type

     IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
      will get a different convergence rate due to the default option of -pc_gasm_type restrict. Use
      -pc_gasm_type basic to get the standard GASM.

   Notes: Each processor can have one or more blocks, but a block cannot be shared by more
     than one processor. Defaults to one block per processor.

     To set options on the solvers for each block, append -sub_ to all the KSP and PC
        options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly

     To set the options on the solvers separately for each block, call PCGASMGetSubKSP()
         and set the options directly on the resulting KSP object (you can access its PC
         with KSPGetPC())

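     Example usage (a sketch; ./ex2 stands for any PETSc-based executable):
.vb
      mpiexec -n 4 ./ex2 -ksp_type gmres -pc_type gasm -pc_gasm_overlap 2 -sub_pc_type ilu -ksp_view
.ve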

   Level: beginner

   Concepts: additive Schwarz method

    References:
    An additive variant of the Schwarz alternating method for the case of many subregions
    M Dryja, OB Widlund - Courant Institute, New York University Technical report

    Domain Decompositions: Parallel Multilevel Methods for Elliptic Partial Differential Equations,
    Barry Smith, Petter Bjorstad, and William Gropp, Cambridge University Press, ISBN 0-521-49589-X.

.seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
           PCBJACOBI, PCGASMGetSubKSP(), PCGASMSetSubdomains(),
           PCGASMSetTotalSubdomains(), PCSetModifySubmatrices(), PCGASMSetOverlap(), PCGASMSetType()

M*/

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCCreate_GASM"
PetscErrorCode  PCCreate_GASM(PC pc)
{
  PetscErrorCode ierr;
  PC_GASM        *osm;

  PetscFunctionBegin;
  ierr = PetscNewLog(pc,PC_GASM,&osm);CHKERRQ(ierr);

  osm->N                      = PETSC_DECIDE;
  osm->n                      = PETSC_DECIDE;
  osm->nmax                   = 0;
  osm->overlap                = 1;
  osm->ksp                    = 0;
  osm->gorestriction          = 0;
  osm->girestriction          = 0;
  osm->gx                     = 0;
  osm->gy                     = 0;
  osm->x                      = 0;
  osm->y                      = 0;
  osm->ois                    = 0;
  osm->iis                    = 0;
  osm->pmat                   = 0;
  osm->type                   = PC_GASM_RESTRICT;
  osm->same_subdomain_solvers = PETSC_TRUE;
  osm->sort_indices           = PETSC_TRUE;
  osm->dm_subdomains          = PETSC_FALSE;

  pc->data                 = (void*)osm;
  pc->ops->apply           = PCApply_GASM;
  pc->ops->applytranspose  = PCApplyTranspose_GASM;
  pc->ops->setup           = PCSetUp_GASM;
  pc->ops->reset           = PCReset_GASM;
  pc->ops->destroy         = PCDestroy_GASM;
  pc->ops->setfromoptions  = PCSetFromOptions_GASM;
  pc->ops->setuponblocks   = PCSetUpOnBlocks_GASM;
  pc->ops->view            = PCView_GASM;
  pc->ops->applyrichardson = 0;

  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGASMSetSubdomains_C","PCGASMSetSubdomains_GASM",PCGASMSetSubdomains_GASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGASMSetTotalSubdomains_C","PCGASMSetTotalSubdomains_GASM",PCGASMSetTotalSubdomains_GASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGASMSetOverlap_C","PCGASMSetOverlap_GASM",PCGASMSetOverlap_GASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGASMSetType_C","PCGASMSetType_GASM",PCGASMSetType_GASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGASMSetSortIndices_C","PCGASMSetSortIndices_GASM",PCGASMSetSortIndices_GASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGASMGetSubKSP_C","PCGASMGetSubKSP_GASM",PCGASMGetSubKSP_GASM);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END
1203 
1204 
1205 #undef __FUNCT__
1206 #define __FUNCT__ "PCGASMCreateLocalSubdomains"
1207 /*@C
1208    PCGASMCreateLocalSubdomains - Creates n local index sets for the overlapping
   Schwarz preconditioner for any problem based on its matrix.
1210 
1211    Collective
1212 
1213    Input Parameters:
1214 +  A       - The global matrix operator
1215 .  overlap - amount of overlap in outer subdomains
1216 -  n       - the number of local subdomains
1217 
1218    Output Parameters:
1219 +  iis - the array of index sets defining the local inner subdomains (on which the correction is applied)
1220 -  ois - the array of index sets defining the local outer subdomains (on which the residual is computed)
1221 
1222    Level: advanced
1223 
1224    Note: this generates n nonoverlapping local inner subdomains on PETSC_COMM_SELF;
1225          PCGASM will generate the overlap from these if you use them in PCGASMSetSubdomains() and set a
1226          nonzero overlap with PCGASMSetOverlap()
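
         A usage sketch (the subdomain count nsub and the overlap value are illustrative;
         A and pc are assumed to exist):
            IS       *iis;
            PetscInt nsub = 4;
            ierr = PCGASMCreateLocalSubdomains(A,0,nsub,&iis,NULL);CHKERRQ(ierr);
            ierr = PCGASMSetSubdomains(pc,nsub,iis,NULL);CHKERRQ(ierr);
            ierr = PCGASMSetOverlap(pc,1);CHKERRQ(ierr);
            ierr = PCGASMDestroySubdomains(nsub,iis,NULL);CHKERRQ(ierr);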
1227 
    In the Fortran version you must provide the array iis[] (and ois[], if requested) already allocated with length n.
1229 
1230 .keywords: PC, GASM, additive Schwarz, create, subdomains, unstructured grid
1231 
1232 .seealso: PCGASMSetSubdomains(), PCGASMDestroySubdomains()
1233 @*/
1234 PetscErrorCode  PCGASMCreateLocalSubdomains(Mat A, PetscInt overlap, PetscInt n, IS *iis[], IS *ois[])
1235 {
1236   MatPartitioning mpart;
1237   const char      *prefix;
1238   PetscErrorCode  (*f)(Mat,MatReuse,Mat*);
1239   PetscMPIInt     size;
1240   PetscInt        i,j,rstart,rend,bs;
1241   PetscBool       isbaij = PETSC_FALSE,foundpart = PETSC_FALSE;
1242   Mat             Ad     = NULL, adj;
1243   IS              ispart,isnumb,*is;
1244   PetscErrorCode  ierr;
1245 
1246   PetscFunctionBegin;
1247   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
1248   PetscValidPointer(iis,4);
1249   if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of local blocks must be > 0, n = %D",n);
1250 
1251   /* Get prefix, row distribution, and block size */
1252   ierr = MatGetOptionsPrefix(A,&prefix);CHKERRQ(ierr);
1253   ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
1254   ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
1255   if (rstart/bs*bs != rstart || rend/bs*bs != rend) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"bad row distribution [%D,%D) for matrix block size %D",rstart,rend,bs);
1256 
1257   /* Get diagonal block from matrix if possible */
1258   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);
1259   ierr = PetscObjectQueryFunction((PetscObject)A,"MatGetDiagonalBlock_C",(void (**)(void))&f);CHKERRQ(ierr);
1260   if (f) {
1261     ierr = MatGetDiagonalBlock(A,&Ad);CHKERRQ(ierr);
1262   } else if (size == 1) {
1263     Ad = A;
1264   }
1265   if (Ad) {
1266     ierr = PetscObjectTypeCompare((PetscObject)Ad,MATSEQBAIJ,&isbaij);CHKERRQ(ierr);
1267     if (!isbaij) {ierr = PetscObjectTypeCompare((PetscObject)Ad,MATSEQSBAIJ,&isbaij);CHKERRQ(ierr);}
1268   }
1269   if (Ad && n > 1) {
1270     PetscBool  match,done;
1271     /* Try to setup a good matrix partitioning if available */
1272     ierr = MatPartitioningCreate(PETSC_COMM_SELF,&mpart);CHKERRQ(ierr);
1273     ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
1274     ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr);
1275     ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGCURRENT,&match);CHKERRQ(ierr);
1276     if (!match) {
1277       ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGSQUARE,&match);CHKERRQ(ierr);
1278     }
1279     if (!match) { /* assume a "good" partitioner is available */
1280       PetscInt       na;
1281       const PetscInt *ia,*ja;
1282       ierr = MatGetRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
1283       if (done) {
1284         /* Build adjacency matrix by hand. Unfortunately a call to
1285            MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
1286            remove the block-aij structure and we cannot expect
1287            MatPartitioning to split vertices as we need */
1288         PetscInt       i,j,len,nnz,cnt,*iia=0,*jja=0;
1289         const PetscInt *row;
1290         nnz = 0;
1291         for (i=0; i<na; i++) { /* count number of nonzeros */
1292           len = ia[i+1] - ia[i];
1293           row = ja + ia[i];
1294           for (j=0; j<len; j++) {
1295             if (row[j] == i) { /* don't count diagonal */
1296               len--; break;
1297             }
1298           }
1299           nnz += len;
1300         }
1301         ierr   = PetscMalloc((na+1)*sizeof(PetscInt),&iia);CHKERRQ(ierr);
1302         ierr   = PetscMalloc((nnz)*sizeof(PetscInt),&jja);CHKERRQ(ierr);
1303         nnz    = 0;
1304         iia[0] = 0;
1305         for (i=0; i<na; i++) { /* fill adjacency */
1306           cnt = 0;
1307           len = ia[i+1] - ia[i];
1308           row = ja + ia[i];
1309           for (j=0; j<len; j++) {
1310             if (row[j] != i) jja[nnz+cnt++] = row[j]; /* if not diagonal */
1311           }
1312           nnz += cnt;
1313           iia[i+1] = nnz;
1314         }
1315         /* Partitioning of the adjacency matrix */
1316         ierr      = MatCreateMPIAdj(PETSC_COMM_SELF,na,na,iia,jja,NULL,&adj);CHKERRQ(ierr);
1317         ierr      = MatPartitioningSetAdjacency(mpart,adj);CHKERRQ(ierr);
1318         ierr      = MatPartitioningSetNParts(mpart,n);CHKERRQ(ierr);
1319         ierr      = MatPartitioningApply(mpart,&ispart);CHKERRQ(ierr);
1320         ierr      = ISPartitioningToNumbering(ispart,&isnumb);CHKERRQ(ierr);
1321         ierr      = MatDestroy(&adj);CHKERRQ(ierr);
1322         foundpart = PETSC_TRUE;
1323       }
1324       ierr = MatRestoreRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
1325     }
1326     ierr = MatPartitioningDestroy(&mpart);CHKERRQ(ierr);
1327   }
1328   ierr = PetscMalloc(n*sizeof(IS),&is);CHKERRQ(ierr);
1329   if (!foundpart) {
1330 
1331     /* Partitioning by contiguous chunks of rows */
1332 
1333     PetscInt mbs   = (rend-rstart)/bs;
1334     PetscInt start = rstart;
1335     for (i=0; i<n; i++) {
1336       PetscInt count = (mbs/n + ((mbs % n) > i)) * bs;
1337       ierr   = ISCreateStride(PETSC_COMM_SELF,count,start,1,&is[i]);CHKERRQ(ierr);
1338       start += count;
1339     }
1340 
1341   } else {
1342 
1343     /* Partitioning by adjacency of diagonal block  */
1344 
1345     const PetscInt *numbering;
1346     PetscInt       *count,nidx,*indices,*newidx,start=0;
1347     /* Get node count in each partition */
1348     ierr = PetscMalloc(n*sizeof(PetscInt),&count);CHKERRQ(ierr);
1349     ierr = ISPartitioningCount(ispart,n,count);CHKERRQ(ierr);
1350     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1351       for (i=0; i<n; i++) count[i] *= bs;
1352     }
1353     /* Build indices from node numbering */
1354     ierr = ISGetLocalSize(isnumb,&nidx);CHKERRQ(ierr);
1355     ierr = PetscMalloc(nidx*sizeof(PetscInt),&indices);CHKERRQ(ierr);
1356     for (i=0; i<nidx; i++) indices[i] = i; /* needs to be initialized */
1357     ierr = ISGetIndices(isnumb,&numbering);CHKERRQ(ierr);
1358     ierr = PetscSortIntWithPermutation(nidx,numbering,indices);CHKERRQ(ierr);
1359     ierr = ISRestoreIndices(isnumb,&numbering);CHKERRQ(ierr);
1360     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1361       ierr = PetscMalloc(nidx*bs*sizeof(PetscInt),&newidx);CHKERRQ(ierr);
1362       for (i=0; i<nidx; i++) {
1363         for (j=0; j<bs; j++) newidx[i*bs+j] = indices[i]*bs + j;
1364       }
1365       ierr    = PetscFree(indices);CHKERRQ(ierr);
1366       nidx   *= bs;
1367       indices = newidx;
1368     }
1369     /* Shift to get global indices */
1370     for (i=0; i<nidx; i++) indices[i] += rstart;
1371 
1372     /* Build the index sets for each block */
1373     for (i=0; i<n; i++) {
1374       ierr   = ISCreateGeneral(PETSC_COMM_SELF,count[i],&indices[start],PETSC_COPY_VALUES,&is[i]);CHKERRQ(ierr);
1375       ierr   = ISSort(is[i]);CHKERRQ(ierr);
1376       start += count[i];
1377     }
1378 
    ierr = PetscFree(count);CHKERRQ(ierr);
    ierr = PetscFree(indices);CHKERRQ(ierr);
1381     ierr = ISDestroy(&isnumb);CHKERRQ(ierr);
1382     ierr = ISDestroy(&ispart);CHKERRQ(ierr);
1383   }
1384   *iis = is;
1385   if (!ois) PetscFunctionReturn(0);
1386   /*
1387    Initially make outer subdomains the same as inner subdomains. If nonzero additional overlap
1388    has been requested, copy the inner subdomains over so they can be modified.
1389    */
1390   ierr = PetscMalloc(n*sizeof(IS),ois);CHKERRQ(ierr);
1391   for (i=0; i<n; ++i) {
1392     if (overlap > 0) { /* With positive overlap, (*iis)[i] will be modified */
1393       ierr = ISDuplicate((*iis)[i],(*ois)+i);CHKERRQ(ierr);
1394       ierr = ISCopy((*iis)[i],(*ois)[i]);CHKERRQ(ierr);
1395     } else {
1396       ierr      = PetscObjectReference((PetscObject)(*iis)[i]);CHKERRQ(ierr);
1397       (*ois)[i] = (*iis)[i];
1398     }
1399   }
1400   if (overlap > 0) {
1401     /* Extend the "overlapping" regions by a number of steps */
1402     ierr = MatIncreaseOverlap(A,n,*ois,overlap);CHKERRQ(ierr);
1403   }
1404   PetscFunctionReturn(0);
1405 }
1406 
1407 #undef __FUNCT__
1408 #define __FUNCT__ "PCGASMDestroySubdomains"
1409 /*@C
1410    PCGASMDestroySubdomains - Destroys the index sets created with
   PCGASMCreateLocalSubdomains() or PCGASMCreateSubdomains2D(). Should be
1412    called after setting subdomains with PCGASMSetSubdomains().
1413 
1414    Collective
1415 
1416    Input Parameters:
1417 +  n   - the number of index sets
.  iis - the array of inner subdomains, can be NULL
1419 -  ois - the array of outer subdomains, can be NULL
1420 
1421    Level: intermediate
1422 
1423    Notes: this is merely a convenience subroutine that walks each list,
1424    destroys each IS on the list, and then frees the list.
1425 
1426 .keywords: PC, GASM, additive Schwarz, create, subdomains, unstructured grid
1427 
1428 .seealso: PCGASMCreateLocalSubdomains(), PCGASMSetSubdomains()
1429 @*/
1430 PetscErrorCode  PCGASMDestroySubdomains(PetscInt n, IS iis[], IS ois[])
1431 {
1432   PetscInt       i;
1433   PetscErrorCode ierr;
1434 
1435   PetscFunctionBegin;
1436   if (n <= 0) PetscFunctionReturn(0);
1437   if (iis) {
1438     PetscValidPointer(iis,2);
1439     for (i=0; i<n; i++) {
1440       ierr = ISDestroy(&iis[i]);CHKERRQ(ierr);
1441     }
1442     ierr = PetscFree(iis);CHKERRQ(ierr);
1443   }
1444   if (ois) {
1445     for (i=0; i<n; i++) {
1446       ierr = ISDestroy(&ois[i]);CHKERRQ(ierr);
1447     }
1448     ierr = PetscFree(ois);CHKERRQ(ierr);
1449   }
1450   PetscFunctionReturn(0);
1451 }
1452 
1453 
1454 #define PCGASMLocalSubdomainBounds2D(M,N,xleft,ylow,xright,yhigh,first,last,xleft_loc,ylow_loc,xright_loc,yhigh_loc,n) \
1455   {                                                                                                       \
1456     PetscInt first_row = first/M, last_row = last/M+1;                                                     \
1457     /*                                                                                                    \
1458      Compute ylow_loc and yhigh_loc so that (ylow_loc,xleft) and (yhigh_loc,xright) are the corners       \
1459      of the bounding box of the intersection of the subdomain with the local ownership range (local       \
1460      subdomain).                                                                                          \
1461      Also compute xleft_loc and xright_loc as the lower and upper bounds on the first and last rows       \
1462      of the intersection.                                                                                 \
    */                                                                                                    \
    /* ylow_loc is the grid row containing the first element of the local subdomain */                   \
1465     *ylow_loc = PetscMax(first_row,ylow);                                                                    \
1466     /* xleft_loc is the offset of first element of the local subdomain within its grid row (might actually be outside the local subdomain) */ \
1467     *xleft_loc = *ylow_loc==first_row ? PetscMax(first%M,xleft) : xleft;                                                                            \
1468     /* yhigh_loc is the grid row above the last local subdomain element */                                                                    \
1469     *yhigh_loc = PetscMin(last_row,yhigh);                                                                                                     \
    /* xright_loc is the offset of the end of the local subdomain within its grid row (might actually be outside the local subdomain) */       \
1471     *xright_loc = *yhigh_loc==last_row ? PetscMin(xright,last%M) : xright;                                                                          \
1472     /* Now compute the size of the local subdomain n. */ \
1473     *n = 0;                                               \
1474     if (*ylow_loc < *yhigh_loc) {                           \
1475       PetscInt width = xright-xleft;                     \
1476       *n += width*(*yhigh_loc-*ylow_loc-1);                 \
1477       *n += PetscMin(PetscMax(*xright_loc-xleft,0),width); \
1478       *n -= PetscMin(PetscMax(*xleft_loc-xleft,0), width); \
1479     } \
1480   }
1481 
1482 
1483 
1484 #undef __FUNCT__
1485 #define __FUNCT__ "PCGASMCreateSubdomains2D"
1486 /*@
1487    PCGASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
1488    preconditioner for a two-dimensional problem on a regular grid.
1489 
1490    Collective
1491 
1492    Input Parameters:
1493 +  M, N               - the global number of grid points in the x and y directions
1494 .  Mdomains, Ndomains - the global number of subdomains in the x and y directions
1495 .  dof                - degrees of freedom per node
1496 -  overlap            - overlap in mesh lines
1497 
1498    Output Parameters:
1499 +  Nsub - the number of local subdomains created
1500 .  iis  - array of index sets defining inner (nonoverlapping) subdomains
1501 -  ois  - array of index sets defining outer (overlapping, if overlap > 0) subdomains
1502 
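   A usage sketch for a 2D grid (the sizes M=N=64, a 4x4 subdomain decomposition,
   dof=1, and overlap=1 are illustrative; operators are assumed to be set on pc):
      IS       *iis,*ois;
      PetscInt nsub;
      ierr = PCGASMCreateSubdomains2D(pc,64,64,4,4,1,1,&nsub,&iis,&ois);CHKERRQ(ierr);
      ierr = PCGASMSetSubdomains(pc,nsub,iis,ois);CHKERRQ(ierr);
      ierr = PCGASMDestroySubdomains(nsub,iis,ois);CHKERRQ(ierr);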
1503 
1504    Level: advanced
1505 
1506 .keywords: PC, GASM, additive Schwarz, create, subdomains, 2D, regular grid
1507 
1508 .seealso: PCGASMSetTotalSubdomains(), PCGASMSetSubdomains(), PCGASMGetSubKSP(),
1509           PCGASMSetOverlap()
1510 @*/
1511 PetscErrorCode  PCGASMCreateSubdomains2D(PC pc, PetscInt M,PetscInt N,PetscInt Mdomains,PetscInt Ndomains,PetscInt dof,PetscInt overlap, PetscInt *nsub,IS **iis,IS **ois)
1512 {
1513   PetscErrorCode ierr;
1514   PetscMPIInt    size, rank;
1515   PetscInt       i, j;
1516   PetscInt       maxheight, maxwidth;
1517   PetscInt       xstart, xleft, xright, xleft_loc, xright_loc;
1518   PetscInt       ystart, ylow,  yhigh,  ylow_loc,  yhigh_loc;
1519   PetscInt       x[2][2], y[2][2], n[2];
1520   PetscInt       first, last;
1521   PetscInt       nidx, *idx;
1522   PetscInt       ii,jj,s,q,d;
1523   PetscInt       k,kk;
1524   PetscMPIInt    color;
1525   MPI_Comm       comm, subcomm;
1526   IS             **xis = 0, **is = ois, **is_local = iis;
1527 
1528   PetscFunctionBegin;
1529   ierr = PetscObjectGetComm((PetscObject)pc, &comm);CHKERRQ(ierr);
1530   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1531   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1532   ierr = MatGetOwnershipRange(pc->pmat, &first, &last);CHKERRQ(ierr);
1533   if (first%dof || last%dof) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Matrix row partitioning unsuitable for domain decomposition: local row range (%D,%D) "
1534                                       "does not respect the number of degrees of freedom per grid point %D", first, last, dof);
1535 
1536   /* Determine the number of domains with nonzero intersections with the local ownership range. */
1537   s      = 0;
1538   ystart = 0;
1539   for (j=0; j<Ndomains; ++j) {
1540     maxheight = N/Ndomains + ((N % Ndomains) > j); /* Maximal height of subdomain */
    if (maxheight < 2) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many %D subdomains in the vertical direction for mesh height %D", Ndomains, N);
1542     /* Vertical domain limits with an overlap. */
1543     ylow   = PetscMax(ystart - overlap,0);
1544     yhigh  = PetscMin(ystart + maxheight + overlap,N);
1545     xstart = 0;
1546     for (i=0; i<Mdomains; ++i) {
1547       maxwidth = M/Mdomains + ((M % Mdomains) > i); /* Maximal width of subdomain */
1548       if (maxwidth < 2) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many %D subdomains in the horizontal direction for mesh width %D", Mdomains, M);
1549       /* Horizontal domain limits with an overlap. */
1550       xleft  = PetscMax(xstart - overlap,0);
1551       xright = PetscMin(xstart + maxwidth + overlap,M);
1552       /*
1553          Determine whether this subdomain intersects this processor's ownership range of pc->pmat.
1554       */
1555       PCGASMLocalSubdomainBounds2D(M,N,xleft,ylow,xright,yhigh,first,last,(&xleft_loc),(&ylow_loc),(&xright_loc),(&yhigh_loc),(&nidx));
1556       if (nidx) ++s;
1557       xstart += maxwidth;
1558     } /* for (i = 0; i < Mdomains; ++i) */
1559     ystart += maxheight;
1560   } /* for (j = 0; j < Ndomains; ++j) */
1561 
1562   /* Now we can allocate the necessary number of ISs. */
1563   *nsub  = s;
1564   ierr   = PetscMalloc((*nsub)*sizeof(IS*),is);CHKERRQ(ierr);
1565   ierr   = PetscMalloc((*nsub)*sizeof(IS*),is_local);CHKERRQ(ierr);
1566   s      = 0;
1567   ystart = 0;
1568   for (j=0; j<Ndomains; ++j) {
1569     maxheight = N/Ndomains + ((N % Ndomains) > j); /* Maximal height of subdomain */
    if (maxheight < 2) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many %D subdomains in the vertical direction for mesh height %D", Ndomains, N);
1571     /* Vertical domain limits with an overlap. */
1572     y[0][0] = PetscMax(ystart - overlap,0);
1573     y[0][1] = PetscMin(ystart + maxheight + overlap,N);
1574     /* Vertical domain limits without an overlap. */
1575     y[1][0] = ystart;
1576     y[1][1] = PetscMin(ystart + maxheight,N);
1577     xstart  = 0;
1578     for (i=0; i<Mdomains; ++i) {
1579       maxwidth = M/Mdomains + ((M % Mdomains) > i); /* Maximal width of subdomain */
1580       if (maxwidth < 2) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many %D subdomains in the horizontal direction for mesh width %D", Mdomains, M);
1581       /* Horizontal domain limits with an overlap. */
1582       x[0][0] = PetscMax(xstart - overlap,0);
1583       x[0][1] = PetscMin(xstart + maxwidth + overlap,M);
1584       /* Horizontal domain limits without an overlap. */
1585       x[1][0] = xstart;
1586       x[1][1] = PetscMin(xstart+maxwidth,M);
1587       /*
1588          Determine whether this domain intersects this processor's ownership range of pc->pmat.
1589          Do this twice: first for the domains with overlaps, and once without.
1590          During the first pass create the subcommunicators, and use them on the second pass as well.
1591       */
1592       for (q = 0; q < 2; ++q) {
        /*
          Domain limits (xleft, xright) and (ylow, yhigh) are adjusted
          according to whether the domain with or without an overlap is being considered.
        */
1597         xleft = x[q][0]; xright = x[q][1];
1598         ylow  = y[q][0]; yhigh  = y[q][1];
1599         PCGASMLocalSubdomainBounds2D(M,N,xleft,ylow,xright,yhigh,first,last,(&xleft_loc),(&ylow_loc),(&xright_loc),(&yhigh_loc),(&nidx));
1600         nidx *= dof;
1601         n[q]  = nidx;
1602         /*
1603          Based on the counted number of indices in the local domain *with an overlap*,
1604          construct a subcommunicator of all the processors supporting this domain.
1605          Observe that a domain with an overlap might have nontrivial local support,
1606          while the domain without an overlap might not.  Hence, the decision to participate
1607          in the subcommunicator must be based on the domain with an overlap.
1608          */
1609         if (q == 0) {
1610           if (nidx) color = 1;
1611           else color = MPI_UNDEFINED;
1612 
1613           ierr = MPI_Comm_split(comm, color, rank, &subcomm);CHKERRQ(ierr);
1614         }
1615         /*
1616          Proceed only if the number of local indices *with an overlap* is nonzero.
1617          */
1618         if (n[0]) {
1619           if (q == 0) xis = is;
1620           if (q == 1) {
1621             /*
1622              The IS for the no-overlap subdomain shares a communicator with the overlapping domain.
1623              Moreover, if the overlap is zero, the two ISs are identical.
1624              */
1625             if (overlap == 0) {
1626               (*is_local)[s] = (*is)[s];
1627               ierr           = PetscObjectReference((PetscObject)(*is)[s]);CHKERRQ(ierr);
1628               continue;
1629             } else {
1630               xis     = is_local;
1631               subcomm = ((PetscObject)(*is)[s])->comm;
1632             }
1633           } /* if (q == 1) */
1634           idx  = NULL;
1635           ierr = PetscMalloc(nidx*sizeof(PetscInt),&idx);CHKERRQ(ierr);
1636           if (nidx) {
1637             k = 0;
1638             for (jj=ylow_loc; jj<yhigh_loc; ++jj) {
1639               PetscInt x0 = (jj==ylow_loc) ? xleft_loc : xleft;
1640               PetscInt x1 = (jj==yhigh_loc-1) ? xright_loc : xright;
1641               kk = dof*(M*jj + x0);
1642               for (ii=x0; ii<x1; ++ii) {
1643                 for (d = 0; d < dof; ++d) {
1644                   idx[k++] = kk++;
1645                 }
1646               }
1647             }
1648           }
1649           ierr = ISCreateGeneral(subcomm,nidx,idx,PETSC_OWN_POINTER,(*xis)+s);CHKERRQ(ierr);
1650         }/* if (n[0]) */
1651       }/* for (q = 0; q < 2; ++q) */
1652       if (n[0]) ++s;
1653       xstart += maxwidth;
1654     } /* for (i = 0; i < Mdomains; ++i) */
1655     ystart += maxheight;
1656   } /* for (j = 0; j < Ndomains; ++j) */
1657   PetscFunctionReturn(0);
1658 }
1659 
1660 #undef __FUNCT__
1661 #define __FUNCT__ "PCGASMGetSubdomains"
1662 /*@C
1663     PCGASMGetSubdomains - Gets the subdomains supported on this processor
1664     for the additive Schwarz preconditioner.
1665 
1666     Not Collective
1667 
1668     Input Parameter:
1669 .   pc - the preconditioner context
1670 
1671     Output Parameters:
1672 +   n   - the number of subdomains for this processor (default value = 1)
1673 .   iis - the index sets that define the inner subdomains (without overlap) supported on this processor (can be NULL)
1674 -   ois - the index sets that define the outer subdomains (with overlap) supported on this processor (can be NULL)
1675 
1676 
1677     Notes:
    The user is responsible for freeing the returned arrays with PetscFree(); the ISs themselves remain owned by the PC.
1679     The IS numbering is in the parallel, global numbering of the vector.
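
    For example, a retrieval sketch that frees only the returned arrays (variable
    names are illustrative; call after PCSetUp()):
       PetscInt n;
       IS       *iis,*ois;
       ierr = PCGASMGetSubdomains(pc,&n,&iis,&ois);CHKERRQ(ierr);
       /* ... inspect iis[0..n-1] and ois[0..n-1] ... */
       ierr = PetscFree(iis);CHKERRQ(ierr);
       ierr = PetscFree(ois);CHKERRQ(ierr);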
1680 
1681     Level: advanced
1682 
1683 .keywords: PC, GASM, get, subdomains, additive Schwarz
1684 
1685 .seealso: PCGASMSetTotalSubdomains(), PCGASMSetOverlap(), PCGASMGetSubKSP(),
1686           PCGASMCreateSubdomains2D(), PCGASMSetSubdomains(), PCGASMGetSubmatrices()
1687 @*/
1688 PetscErrorCode  PCGASMGetSubdomains(PC pc,PetscInt *n,IS *iis[],IS *ois[])
1689 {
1690   PC_GASM        *osm;
1691   PetscErrorCode ierr;
1692   PetscBool      match;
1693   PetscInt       i;
1694 
1695   PetscFunctionBegin;
1696   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1697   ierr = PetscObjectTypeCompare((PetscObject)pc,PCGASM,&match);CHKERRQ(ierr);
1698   if (!match) SETERRQ2(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Incorrect object type: expected %s, got %s instead", PCGASM, ((PetscObject)pc)->type_name);
1699   osm = (PC_GASM*)pc->data;
1700   if (n) *n = osm->n;
1701   if (iis) {
1702     ierr = PetscMalloc(osm->n*sizeof(IS), iis);CHKERRQ(ierr);
1703   }
1704   if (ois) {
1705     ierr = PetscMalloc(osm->n*sizeof(IS), ois);CHKERRQ(ierr);
1706   }
1707   if (iis || ois) {
1708     for (i = 0; i < osm->n; ++i) {
1709       if (iis) (*iis)[i] = osm->iis[i];
1710       if (ois) (*ois)[i] = osm->ois[i];
1711     }
1712   }
1713   PetscFunctionReturn(0);
1714 }
1715 
1716 #undef __FUNCT__
1717 #define __FUNCT__ "PCGASMGetSubmatrices"
1718 /*@C
1719     PCGASMGetSubmatrices - Gets the local submatrices (for this processor
1720     only) for the additive Schwarz preconditioner.
1721 
1722     Not Collective
1723 
1724     Input Parameter:
1725 .   pc - the preconditioner context
1726 
1727     Output Parameters:
1728 +   n   - the number of matrices for this processor (default value = 1)
1729 -   mat - the matrices
1730 
1731     Notes: matrices returned by this routine have the same communicators as the index sets (IS)
1732            used to define subdomains in PCGASMSetSubdomains(), or PETSC_COMM_SELF, if the
1733            subdomains were defined using PCGASMSetTotalSubdomains().
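
    For example, a sketch that queries the subdomain matrices after PCSetUp() (the
    use of MatGetSize() here is illustrative):
       PetscInt n,m;
       Mat      *submat;
       ierr = PCGASMGetSubmatrices(pc,&n,&submat);CHKERRQ(ierr);
       if (n) {ierr = MatGetSize(submat[0],&m,NULL);CHKERRQ(ierr);}
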
1734     Level: advanced
1735 
1736 .keywords: PC, GASM, set, local, subdomains, additive Schwarz, block Jacobi
1737 
.seealso: PCGASMSetTotalSubdomains(), PCGASMSetOverlap(), PCGASMGetSubKSP(),
1739           PCGASMCreateSubdomains2D(), PCGASMSetSubdomains(), PCGASMGetSubdomains()
1740 @*/
1741 PetscErrorCode  PCGASMGetSubmatrices(PC pc,PetscInt *n,Mat *mat[])
1742 {
1743   PC_GASM        *osm;
1744   PetscErrorCode ierr;
1745   PetscBool      match;
1746 
1747   PetscFunctionBegin;
1748   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1749   PetscValidIntPointer(n,2);
1750   if (mat) PetscValidPointer(mat,3);
  if (!pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUp() or PCSetUp().");
1752   ierr = PetscObjectTypeCompare((PetscObject)pc,PCGASM,&match);CHKERRQ(ierr);
1753   if (!match) SETERRQ2(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Expected %s, got %s instead", PCGASM, ((PetscObject)pc)->type_name);
1754   osm = (PC_GASM*)pc->data;
1755   if (n) *n = osm->n;
1756   if (mat) *mat = osm->pmat;
1757   PetscFunctionReturn(0);
1758 }
1759 
1760 #undef __FUNCT__
1761 #define __FUNCT__ "PCGASMSetDMSubdomains"
1762 /*@
    PCGASMSetDMSubdomains - Indicates whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.

    Logically Collective
1765 
    Input Parameters:
1767 +   pc  - the preconditioner
1768 -   flg - boolean indicating whether to use subdomains defined by the DM
1769 
1770     Options Database Key:
.   -pc_gasm_dm_subdomains <bool> - use subdomains defined by the DM
1772 
1773     Level: intermediate
1774 
1775     Notes:
1776     PCGASMSetTotalSubdomains() and PCGASMSetOverlap() take precedence over PCGASMSetDMSubdomains(),
1777     so setting either of the first two effectively turns the latter off.
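
    For example, a minimal sketch (assumes a DM has been attached to the KSP or PC;
    the call must come before PCSetUp()):
       ierr = PCGASMSetDMSubdomains(pc,PETSC_TRUE);CHKERRQ(ierr);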
1778 
1779 .keywords: PC, ASM, DM, set, subdomains, additive Schwarz
1780 
1781 .seealso: PCGASMGetDMSubdomains(), PCGASMSetTotalSubdomains(), PCGASMSetOverlap()
1782           PCGASMCreateSubdomains2D()
1783 @*/
1784 PetscErrorCode  PCGASMSetDMSubdomains(PC pc,PetscBool flg)
1785 {
1786   PC_GASM        *osm = (PC_GASM*)pc->data;
1787   PetscErrorCode ierr;
1788   PetscBool      match;
1789 
1790   PetscFunctionBegin;
1791   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1792   PetscValidLogicalCollectiveBool(pc,flg,2);
1793   if (pc->setupcalled) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Not for a setup PC.");
1794   ierr = PetscObjectTypeCompare((PetscObject)pc,PCGASM,&match);CHKERRQ(ierr);
1795   if (match) {
1796     osm->dm_subdomains = flg;
1797   }
1798   PetscFunctionReturn(0);
1799 }
1800 
1801 #undef __FUNCT__
1802 #define __FUNCT__ "PCGASMGetDMSubdomains"
1803 /*@
    PCGASMGetDMSubdomains - Returns a flag indicating whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.

    Not Collective
1806 
1807     Input Parameter:
1808 .   pc  - the preconditioner
1809 
1810     Output Parameter:
1811 .   flg - boolean indicating whether to use subdomains defined by the DM
1812 
1813     Level: intermediate
1814 
1815 .keywords: PC, ASM, DM, set, subdomains, additive Schwarz
1816 
1817 .seealso: PCGASMSetDMSubdomains(), PCGASMSetTotalSubdomains(), PCGASMSetOverlap()
1818           PCGASMCreateSubdomains2D()
1819 @*/
1820 PetscErrorCode  PCGASMGetDMSubdomains(PC pc,PetscBool* flg)
1821 {
1822   PC_GASM        *osm = (PC_GASM*)pc->data;
1823   PetscErrorCode ierr;
1824   PetscBool      match;
1825 
1826   PetscFunctionBegin;
1827   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1828   PetscValidPointer(flg,2);
1829   ierr = PetscObjectTypeCompare((PetscObject)pc,PCGASM,&match);CHKERRQ(ierr);
1830   if (match) {
1831     if (flg) *flg = osm->dm_subdomains;
1832   }
1833   PetscFunctionReturn(0);
1834 }
1835