xref: /petsc/src/ksp/pc/impls/gasm/gasm.c (revision 5fd668637986a8d8518383a9159eebc368e1d5b4)
1 /*
2   This file defines a "generalized" additive Schwarz preconditioner for any Mat implementation.
3   In this version each processor may intersect multiple subdomains and any subdomain may
4   intersect multiple processors.  Intersections of subdomains with processors are called *local
5   subdomains*.
6 
7        N    - total number of local subdomains on all processors  (set in PCGASMSetTotalSubdomains() or calculated in PCSetUp_GASM())
8        n    - actual number of local subdomains on this processor (set in PCGASMSetSubdomains() or calculated in PCGASMSetTotalSubdomains())
9        nmax - maximum number of local subdomains per processor    (calculated in PCGASMSetTotalSubdomains() or in PCSetUp_GASM())
10 */
11 #include <petsc-private/pcimpl.h>     /*I "petscpc.h" I*/
12 
13 typedef struct {
14   PetscInt   N,n,nmax;
15   PetscInt   overlap;             /* overlap requested by user */
16   KSP        *ksp;                /* linear solvers for each block */
17   Vec        gx,gy;               /* Merged work vectors */
18   Vec        *x,*y;               /* Split work vectors; storage aliases pieces of storage of the above merged vectors. */
19   VecScatter gorestriction;       /* merged restriction to disjoint union of outer subdomains */
20   VecScatter girestriction;       /* merged restriction to disjoint union of inner subdomains */
21   IS         *ois;                /* index sets that define the outer (conceptually, overlapping) subdomains */
22   IS         *iis;                /* index sets that define the inner (conceptually, nonoverlapping) subdomains */
23   Mat        *pmat;               /* subdomain block matrices */
24   PCGASMType type;                /* use reduced interpolation, restriction or both */
25   PetscBool  create_local;           /* whether the autocreated subdomains are local or not. */
26   PetscBool  type_set;               /* if user set this value (so won't change it for symmetric problems) */
27   PetscBool  same_subdomain_solvers; /* flag indicating whether all local solvers are the same */
28   PetscBool  sort_indices;           /* flag to sort subdomain indices */
29 } PC_GASM;
30 
31 #undef __FUNCT__
32 #define __FUNCT__ "PCGASMSubdomainView_Private"
33 static PetscErrorCode  PCGASMSubdomainView_Private(PC pc, PetscInt i, PetscViewer viewer)
34 {
35   PC_GASM        *osm  = (PC_GASM*)pc->data;
36   PetscInt       j,nidx;
37   const PetscInt *idx;
38   PetscViewer    sviewer;
39   char           *cidx;
40   PetscErrorCode ierr;
41 
42   PetscFunctionBegin;
43   if (i < 0 || i >= osm->n) SETERRQ2(((PetscObject)viewer)->comm, PETSC_ERR_ARG_WRONG, "Invalid subdomain %D: must be nonnegative and less than %D", i, osm->n);
44   /* Inner subdomains. */
45   ierr = ISGetLocalSize(osm->iis[i], &nidx);CHKERRQ(ierr);
46   /*
47    No more than 15 characters per index plus a space.
48    PetscViewerStringSPrintf requires a string of size at least 2, so use (nidx+1) instead of nidx,
49    in case nidx == 0. That will take care of the space for the trailing '\0' as well.
50    For nidx == 0, the whole string consists of 16 '\0' characters.
51    */
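  /* For instance (illustration only), nidx == 3 allocates 16*(3+1)+1 = 65 bytes: ample room for
     three indices of up to 15 digits plus a trailing space each, and for the terminating '\0'. */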
52   ierr = PetscMalloc(sizeof(char)*(16*(nidx+1)+1), &cidx);CHKERRQ(ierr);
53   ierr = PetscViewerStringOpen(PETSC_COMM_SELF, cidx, 16*(nidx+1)+1, &sviewer);CHKERRQ(ierr);
54   ierr = ISGetIndices(osm->iis[i], &idx);CHKERRQ(ierr);
55   for (j = 0; j < nidx; ++j) {
56     ierr = PetscViewerStringSPrintf(sviewer, "%D ", idx[j]);CHKERRQ(ierr);
57   }
58   ierr = ISRestoreIndices(osm->iis[i],&idx);CHKERRQ(ierr);
59   ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr);
60   ierr = PetscViewerASCIIPrintf(viewer, "Inner subdomain:\n");CHKERRQ(ierr);
61   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
62   ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
63   ierr = PetscViewerASCIISynchronizedPrintf(viewer, "%s", cidx);CHKERRQ(ierr);
64   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
65   ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
66   ierr = PetscViewerASCIIPrintf(viewer, "\n");CHKERRQ(ierr);
67   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
68   ierr = PetscFree(cidx);CHKERRQ(ierr);
69   /* Outer subdomains. */
70   ierr = ISGetLocalSize(osm->ois[i], &nidx);CHKERRQ(ierr);
71   /*
72    No more than 15 characters per index plus a space.
73    PetscViewerStringSPrintf requires a string of size at least 2, so use (nidx+1) instead of nidx,
74    in case nidx == 0. That will take care of the space for the trailing '\0' as well.
75    For nidx == 0, the whole string consists of 16 '\0' characters.
76    */
77   ierr = PetscMalloc(sizeof(char)*(16*(nidx+1)+1), &cidx);CHKERRQ(ierr);
78   ierr = PetscViewerStringOpen(PETSC_COMM_SELF, cidx, 16*(nidx+1)+1, &sviewer);CHKERRQ(ierr);
79   ierr = ISGetIndices(osm->ois[i], &idx);CHKERRQ(ierr);
80   for (j = 0; j < nidx; ++j) {
81     ierr = PetscViewerStringSPrintf(sviewer,"%D ", idx[j]);CHKERRQ(ierr);
82   }
83   ierr = ISRestoreIndices(osm->ois[i],&idx);CHKERRQ(ierr);
84   ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr);
85   ierr = PetscViewerASCIIPrintf(viewer, "Outer subdomain:\n");CHKERRQ(ierr);
86   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
87   ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
88   ierr = PetscViewerASCIISynchronizedPrintf(viewer, "%s", cidx);CHKERRQ(ierr);
89   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
90   ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
91   ierr = PetscViewerASCIIPrintf(viewer, "\n");CHKERRQ(ierr);
92   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
93   ierr = PetscFree(cidx);CHKERRQ(ierr);
94 
95   PetscFunctionReturn(0);
96 }
97 
98 #undef __FUNCT__
99 #define __FUNCT__ "PCGASMPrintSubdomains"
100 static PetscErrorCode  PCGASMPrintSubdomains(PC pc)
101 {
102   PC_GASM        *osm  = (PC_GASM*)pc->data;
103   const char     *prefix;
104   char           fname[PETSC_MAX_PATH_LEN+1];
105   PetscInt       i, l, d, count, gcount, *permutation, *numbering;
106   PetscBool      found;
107   PetscViewer    viewer, sviewer = PETSC_NULL;
108   PetscErrorCode ierr;
109 
110   PetscFunctionBegin;
111   ierr = PetscMalloc2(osm->n, PetscInt, &permutation, osm->n, PetscInt, &numbering);CHKERRQ(ierr);
112   for (i = 0; i < osm->n; ++i) permutation[i] = i;
113   ierr = PetscObjectsGetGlobalNumbering(((PetscObject)pc)->comm, osm->n, (PetscObject*)osm->ois, &gcount, numbering);CHKERRQ(ierr);
114   ierr = PetscSortIntWithPermutation(osm->n, numbering, permutation);CHKERRQ(ierr);
115   ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
116   ierr = PetscOptionsGetString(prefix,"-pc_gasm_print_subdomains",fname,PETSC_MAX_PATH_LEN,&found);CHKERRQ(ierr);
117   if (!found) { ierr = PetscStrcpy(fname,"stdout");CHKERRQ(ierr); };
118   ierr = PetscViewerASCIIOpen(((PetscObject)pc)->comm,fname,&viewer);CHKERRQ(ierr);
119   /*
120    Make sure the viewer has a name. Otherwise this may cause a deadlock or other weird errors when creating a subcomm viewer:
121    the subcomm viewer will attempt to inherit the viewer's name, which, if not set, will be constructed collectively on the comm.
122   */
123   ierr = PetscObjectName((PetscObject)viewer);CHKERRQ(ierr);
124   l = 0;
125   for (count = 0; count < gcount; ++count) {
126     /* Now let subdomains go one at a time in the global numbering order and print their subdomain/solver info. */
127     if (l<osm->n) {
128       d = permutation[l]; /* d is the local number of the l-th smallest (in the global ordering) among the locally supported subdomains */
129       if (numbering[d] == count) {
130         ierr = PetscViewerGetSubcomm(viewer,((PetscObject)osm->ois[d])->comm, &sviewer);CHKERRQ(ierr);
131         ierr = PCGASMSubdomainView_Private(pc,d,sviewer);CHKERRQ(ierr);
132         ierr = PetscViewerRestoreSubcomm(viewer,((PetscObject)osm->ois[d])->comm, &sviewer);CHKERRQ(ierr);
133         ++l;
134       }
135     }
136     ierr = MPI_Barrier(((PetscObject)pc)->comm);CHKERRQ(ierr);
137   }
138   ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
139   ierr = PetscFree2(permutation,numbering);CHKERRQ(ierr);
140   PetscFunctionReturn(0);
141 }
142 
143 
144 #undef __FUNCT__
145 #define __FUNCT__ "PCView_GASM"
146 static PetscErrorCode PCView_GASM(PC pc,PetscViewer viewer)
147 {
148   PC_GASM        *osm = (PC_GASM*)pc->data;
149   const char     *prefix;
150   PetscErrorCode ierr;
151   PetscMPIInt    rank, size;
152   PetscInt       i,bsz;
153   PetscBool      iascii,view_subdomains=PETSC_FALSE;
154   PetscViewer    sviewer;
155   PetscInt       count, l, gcount, *numbering, *permutation;
156   char           overlap[256]     = "user-defined overlap";
157   char           gsubdomains[256] = "unknown total number of subdomains";
158   char           lsubdomains[256] = "unknown number of local subdomains";
159   char           msubdomains[256] = "unknown max number of local subdomains";
160 
161   PetscFunctionBegin;
162   ierr = MPI_Comm_size(((PetscObject)pc)->comm, &size);CHKERRQ(ierr);
163   ierr = MPI_Comm_rank(((PetscObject)pc)->comm, &rank);CHKERRQ(ierr);
164   ierr = PetscMalloc2(osm->n, PetscInt, &permutation, osm->n, PetscInt, &numbering);CHKERRQ(ierr);
165   for (i = 0; i < osm->n; ++i) permutation[i] = i;
166   ierr = PetscObjectsGetGlobalNumbering(((PetscObject)pc)->comm, osm->n, (PetscObject*)osm->ois, &gcount, numbering);CHKERRQ(ierr);
167   ierr = PetscSortIntWithPermutation(osm->n, numbering, permutation);CHKERRQ(ierr);
168 
169   if (osm->overlap >= 0) {
170     ierr = PetscSNPrintf(overlap,sizeof(overlap),"requested amount of overlap = %D",osm->overlap);CHKERRQ(ierr);
171   }
172   ierr = PetscSNPrintf(gsubdomains, sizeof(gsubdomains), "total number of subdomains = %D",gcount);CHKERRQ(ierr);
173   if (osm->N > 0) {
174     ierr = PetscSNPrintf(lsubdomains, sizeof(lsubdomains), "number of local subdomains = %D",osm->N);CHKERRQ(ierr);
175   }
176   if (osm->nmax > 0) {
177     ierr = PetscSNPrintf(msubdomains,sizeof(msubdomains),"max number of local subdomains = %D",osm->nmax);CHKERRQ(ierr);
178   }
179 
180   ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
181   ierr = PetscOptionsGetBool(prefix,"-pc_gasm_view_subdomains",&view_subdomains,PETSC_NULL);CHKERRQ(ierr);
182 
183   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
184   if (iascii) {
185     /*
186      Make sure the viewer has a name. Otherwise this may cause a deadlock when creating a subcomm viewer:
187      the subcomm viewer will attempt to inherit the viewer's name, which, if not set, will be constructed
188      collectively on the comm.
189      */
190     ierr = PetscObjectName((PetscObject)viewer);CHKERRQ(ierr);
191     ierr = PetscViewerASCIIPrintf(viewer,"Generalized additive Schwarz:\n");CHKERRQ(ierr);
192     ierr = PetscViewerASCIIPrintf(viewer,"Restriction/interpolation type: %s\n",PCGASMTypes[osm->type]);CHKERRQ(ierr);
193     ierr = PetscViewerASCIIPrintf(viewer,"%s\n",overlap);CHKERRQ(ierr);
194     ierr = PetscViewerASCIIPrintf(viewer,"%s\n",gsubdomains);CHKERRQ(ierr);
195     ierr = PetscViewerASCIIPrintf(viewer,"%s\n",lsubdomains);CHKERRQ(ierr);
196     ierr = PetscViewerASCIIPrintf(viewer,"%s\n",msubdomains);CHKERRQ(ierr);
197     ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
198     ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d:%d] number of locally-supported subdomains = %D\n",(int)rank,(int)size,osm->n);CHKERRQ(ierr);
199     ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
200     ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
201     /* Cannot take advantage of osm->same_subdomain_solvers without a global numbering of subdomains. */
202     ierr = PetscViewerASCIIPrintf(viewer,"Subdomain solver info is as follows:\n");CHKERRQ(ierr);
203     ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
204     ierr = PetscViewerASCIIPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
205     /* Make sure that everybody waits for the banner to be printed. */
206     ierr = MPI_Barrier(((PetscObject)viewer)->comm);CHKERRQ(ierr);
207     /* Now let subdomains go one at a time in the global numbering order and print their subdomain/solver info. */
208     l = 0;
209     for (count = 0; count < gcount; ++count) {
210       PetscMPIInt srank, ssize;
211       if (l<osm->n) {
212         PetscInt d = permutation[l]; /* d is the local number of the l-th smallest (in the global ordering) among the locally supported subdomains */
213         if (numbering[d] == count) {
214           ierr = MPI_Comm_size(((PetscObject)osm->ois[d])->comm, &ssize);CHKERRQ(ierr);
215           ierr = MPI_Comm_rank(((PetscObject)osm->ois[d])->comm, &srank);CHKERRQ(ierr);
216           ierr = PetscViewerGetSubcomm(viewer,((PetscObject)osm->ois[d])->comm, &sviewer);CHKERRQ(ierr);
217           ierr = ISGetLocalSize(osm->ois[d],&bsz);CHKERRQ(ierr);
218           ierr = PetscViewerASCIISynchronizedAllow(sviewer,PETSC_TRUE);CHKERRQ(ierr);
219           ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"[%d:%d] (subcomm [%d:%d]) local subdomain number %D, local size = %D\n",(int)rank,(int)size,(int)srank,(int)ssize,d,bsz);CHKERRQ(ierr);
220           ierr = PetscViewerFlush(sviewer);CHKERRQ(ierr);
221           ierr = PetscViewerASCIISynchronizedAllow(sviewer,PETSC_FALSE);CHKERRQ(ierr);
222           if (view_subdomains) {
223             ierr = PCGASMSubdomainView_Private(pc,d,sviewer);CHKERRQ(ierr);
224           }
225           if (!pc->setupcalled) {
226             ierr = PetscViewerASCIIPrintf(sviewer, "Solver not set up yet: PCSetUp() not yet called\n");CHKERRQ(ierr);
227           }
228           else {
229             ierr = KSPView(osm->ksp[d],sviewer);CHKERRQ(ierr);
230           }
231           ierr = PetscViewerASCIIPrintf(sviewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
232           ierr = PetscViewerFlush(sviewer);CHKERRQ(ierr);
233           ierr = PetscViewerRestoreSubcomm(viewer,((PetscObject)osm->ois[d])->comm, &sviewer);CHKERRQ(ierr);
234           ++l;
235         }
236       }
237       ierr = MPI_Barrier(((PetscObject)pc)->comm);CHKERRQ(ierr);
238     }
239     ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
240   }
241   ierr = PetscFree2(permutation,numbering);CHKERRQ(ierr);
242   PetscFunctionReturn(0);
243 }
244 
245 
246 
247 
248 
249 #undef __FUNCT__
250 #define __FUNCT__ "PCSetUp_GASM"
251 static PetscErrorCode PCSetUp_GASM(PC pc)
252 {
253   PC_GASM         *osm  = (PC_GASM*)pc->data;
254   PetscErrorCode ierr;
255   PetscBool      symset,flg;
256   PetscInt       i;
257   PetscMPIInt    rank, size;
258   MatReuse       scall = MAT_REUSE_MATRIX;
259   KSP            ksp;
260   PC             subpc;
261   const char     *prefix,*pprefix;
262   Vec            x,y;
263   PetscInt       oni;       /* Number of indices in the i-th local outer subdomain.               */
264   const PetscInt *oidxi;    /* Indices of the i-th local outer subdomain.                          */
265   PetscInt       on;        /* Number of indices in the disjoint union of local outer subdomains. */
266   PetscInt       *oidx;     /* Indices in the disjoint union of local outer subdomains. */
267   IS             gois;      /* Disjoint union of the global indices of the outer subdomains.      */
268   IS             goid;      /* Identity IS of the size of the disjoint union of outer subdomains. */
269   PetscScalar    *gxarray, *gyarray;
270   PetscInt       gofirst;   /* Start of locally-owned indices in the vectors -- osm->gx,osm->gy --
271                              over the disjoint union of outer subdomains. */
272   DM             *subdomain_dm = PETSC_NULL;
273 
274   PetscFunctionBegin;
275   ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
276   ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
277   if (!pc->setupcalled) {
278 
279     if (!osm->type_set) {
280       ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
281       if (symset && flg) { osm->type = PC_GASM_BASIC; }
282     }
283 
284     /*
285      If subdomains have been set, then the local number of subdomains, osm->n, is NOT PETSC_DECIDE and is at least 1.
286      The total number of subdomains, osm->N, is not necessarily set: it might be PETSC_DECIDE, in which case it will be calculated from osm->n.
287      */
288     if (osm->n == PETSC_DECIDE) {
289       /* no subdomains given */
290       /* try pc->dm first */
291       if (pc->dm) {
292         char      ddm_name[1024];
293         DM        ddm;
294         PetscBool flg;
295         PetscInt     num_subdomains, d;
296         char         **subdomain_names;
297         IS           *inner_subdomain_is, *outer_subdomain_is;
298         /* Allow the user to request a decomposition DM by name */
299         ierr = PetscStrncpy(ddm_name, "", 1024);CHKERRQ(ierr);
300         ierr = PetscOptionsString("-pc_gasm_decomposition","Name of the DM defining the decomposition", "PCSetDM",ddm_name,ddm_name,1024,&flg);CHKERRQ(ierr);
301         if (flg) {
302           ierr = DMCreateDomainDecompositionDM(pc->dm, ddm_name, &ddm);CHKERRQ(ierr);
303           if (!ddm) SETERRQ1(((PetscObject)pc)->comm, PETSC_ERR_ARG_WRONGSTATE, "Unknown DM decomposition name %s", ddm_name);
304           ierr = PetscInfo(pc,"Using decomposition DM defined using options database\n");CHKERRQ(ierr);
305           ierr = PCSetDM(pc,ddm);CHKERRQ(ierr);
306         }
307         ierr = DMCreateDomainDecomposition(pc->dm, &num_subdomains, &subdomain_names, &inner_subdomain_is, &outer_subdomain_is, &subdomain_dm);CHKERRQ(ierr);
308         if (num_subdomains) {
309           ierr = PCGASMSetSubdomains(pc, num_subdomains, inner_subdomain_is, outer_subdomain_is);CHKERRQ(ierr);
310         }
311         for (d = 0; d < num_subdomains; ++d) {
312           if (subdomain_names)    {ierr = PetscFree(subdomain_names[d]);CHKERRQ(ierr);}
313           if (inner_subdomain_is) {ierr = ISDestroy(&inner_subdomain_is[d]);CHKERRQ(ierr);}
314           if (outer_subdomain_is) {ierr = ISDestroy(&outer_subdomain_is[d]);CHKERRQ(ierr);}
315         }
316         ierr = PetscFree(subdomain_names);CHKERRQ(ierr);
317         ierr = PetscFree(inner_subdomain_is);CHKERRQ(ierr);
318         ierr = PetscFree(outer_subdomain_is);CHKERRQ(ierr);
319       }
320       if (osm->n == PETSC_DECIDE) { /* still no subdomains; use one per processor */
321         osm->nmax = osm->n = 1;
322         ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
323         osm->N = size;
324       }
325     }
326     if (!osm->iis) {
327       /*
328        The local number of subdomains was set just above, or in PCGASMSetTotalSubdomains(), or in PCGASMSetSubdomains(),
329        but the actual subdomains have not been supplied (in PCGASMSetSubdomains()).
330        We create the requisite number of inner subdomains on PETSC_COMM_SELF (for now).
331        */
332       ierr = PCGASMCreateLocalSubdomains(pc->pmat,osm->overlap,osm->n,&osm->iis,&osm->ois);CHKERRQ(ierr);
333     }
334     if (osm->N == PETSC_DECIDE) {
335       struct {PetscInt max,sum;} inwork,outwork;
336       /* determine global number of subdomains and the max number of local subdomains */
337       inwork.max = osm->n;
338       inwork.sum = osm->n;
339       ierr = MPI_Allreduce(&inwork,&outwork,1,MPIU_2INT,PetscMaxSum_Op,((PetscObject)pc)->comm);CHKERRQ(ierr);
340       osm->nmax = outwork.max;
341       osm->N    = outwork.sum;
342     }
343 
344     ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
345     flg  = PETSC_FALSE;
346     ierr = PetscOptionsGetBool(prefix,"-pc_gasm_print_subdomains",&flg,PETSC_NULL);CHKERRQ(ierr);
347     if (flg) { ierr = PCGASMPrintSubdomains(pc);CHKERRQ(ierr); }
348 
349     if (osm->sort_indices) {
350       for (i=0; i<osm->n; i++) {
351         ierr = ISSort(osm->ois[i]);CHKERRQ(ierr);
352         ierr = ISSort(osm->iis[i]);CHKERRQ(ierr);
353       }
354     }
355     /*
356      Merge the ISs, create merged vectors and restrictions.
357      */
358     /* Merge outer subdomain ISs and construct a restriction onto the disjoint union of local outer subdomains. */
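    /*
     Illustrative sketch (numbers made up): if this process holds two outer subdomain ISs with
     global indices {0,1,4} and {4,5}, then on = 5, oidx = {0,1,4,4,5}, and goid is the contiguous
     stride gofirst..gofirst+4 of osm->gx, so gorestriction copies those five entries of x (index 4
     appearing twice) back-to-back into the merged work vector.
     */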
359     on = 0;
360     for (i=0; i<osm->n; i++) {
361       ierr = ISGetLocalSize(osm->ois[i],&oni);CHKERRQ(ierr);
362       on += oni;
363     }
364     ierr = PetscMalloc(on*sizeof(PetscInt), &oidx);CHKERRQ(ierr);
365     on = 0;
366     for (i=0; i<osm->n; i++) {
367       ierr = ISGetLocalSize(osm->ois[i],&oni);CHKERRQ(ierr);
368       ierr = ISGetIndices(osm->ois[i],&oidxi);CHKERRQ(ierr);
369       ierr = PetscMemcpy(oidx+on, oidxi, sizeof(PetscInt)*oni);CHKERRQ(ierr);
370       ierr = ISRestoreIndices(osm->ois[i], &oidxi);CHKERRQ(ierr);
371       on += oni;
372     }
373     ierr = ISCreateGeneral(((PetscObject)(pc))->comm, on, oidx, PETSC_OWN_POINTER, &gois);CHKERRQ(ierr);
374     ierr = MatGetVecs(pc->pmat,&x,&y);CHKERRQ(ierr);
375     ierr = VecCreateMPI(((PetscObject)pc)->comm, on, PETSC_DECIDE, &osm->gx);CHKERRQ(ierr);
376     ierr = VecDuplicate(osm->gx,&osm->gy);CHKERRQ(ierr);
377     ierr = VecGetOwnershipRange(osm->gx, &gofirst, PETSC_NULL);CHKERRQ(ierr);
378     ierr = ISCreateStride(((PetscObject)pc)->comm,on,gofirst,1, &goid);CHKERRQ(ierr);
379     ierr = VecScatterCreate(x,gois,osm->gx,goid, &(osm->gorestriction));CHKERRQ(ierr);
380     ierr = VecDestroy(&x);CHKERRQ(ierr);
381     ierr = ISDestroy(&gois);CHKERRQ(ierr);
382     /* Merge inner subdomain ISs and construct a restriction onto the disjoint union of local inner subdomains. */
383     { PetscInt       ini;     /* Number of indices in the i-th local inner subdomain. */
384       PetscInt       in;      /* Number of indices in the disjoint union of local inner subdomains. */
385       PetscInt       *iidx;   /* Global indices in the merged local inner subdomain. */
386       PetscInt       *ioidx;  /* Global indices of the disjoint union of inner subdomains within the disjoint union of outer subdomains. */
387       IS             giis;    /* IS for the disjoint union of inner subdomains. */
388       IS             giois;   /* IS for the disjoint union of inner subdomains within the disjoint union of outer subdomains. */
389       /**/
390       in = 0;
391       for (i=0; i<osm->n; i++) {
392         ierr = ISGetLocalSize(osm->iis[i],&ini);CHKERRQ(ierr);
393         in += ini;
394       }
395       ierr = PetscMalloc(in*sizeof(PetscInt), &iidx);CHKERRQ(ierr);
396       ierr = PetscMalloc(in*sizeof(PetscInt), &ioidx);CHKERRQ(ierr);
397       ierr = VecGetOwnershipRange(osm->gx,&gofirst, PETSC_NULL);CHKERRQ(ierr);
398       in = 0;
399       on = 0;
400       for (i=0; i<osm->n; i++) {
401         const PetscInt *iidxi;        /* Global indices of the i-th local inner subdomain. */
402         ISLocalToGlobalMapping ltogi; /* Map from global to local indices of the i-th outer local subdomain. */
403         PetscInt       *ioidxi;       /* Local indices of the i-th local inner subdomain within the local outer subdomain. */
404         PetscInt       ioni;          /* Number of indices in ioidxi; if ioni != ini the inner subdomain is not a subdomain of the outer subdomain (error). */
405         PetscInt       k;
406         ierr = ISGetLocalSize(osm->iis[i],&ini);CHKERRQ(ierr);
407         ierr = ISGetLocalSize(osm->ois[i],&oni);CHKERRQ(ierr);
408         ierr = ISGetIndices(osm->iis[i],&iidxi);CHKERRQ(ierr);
409         ierr = PetscMemcpy(iidx+in, iidxi, sizeof(PetscInt)*ini);CHKERRQ(ierr);
410         ierr = ISLocalToGlobalMappingCreateIS(osm->ois[i],&ltogi);CHKERRQ(ierr);
411         ioidxi = ioidx+in;
412         ierr = ISGlobalToLocalMappingApply(ltogi,IS_GTOLM_DROP,ini,iidxi,&ioni,ioidxi);CHKERRQ(ierr);
413         ierr = ISLocalToGlobalMappingDestroy(&ltogi);CHKERRQ(ierr);
414         ierr = ISRestoreIndices(osm->iis[i], &iidxi);CHKERRQ(ierr);
415         if (ioni != ini) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Inner subdomain %D contains %D indices outside of its outer subdomain", i, ini - ioni);
416         for (k = 0; k < ini; ++k) {
417           ioidxi[k] += gofirst+on;
418         }
419         in += ini;
420         on += oni;
421       }
422       ierr = ISCreateGeneral(((PetscObject)pc)->comm, in, iidx,  PETSC_OWN_POINTER, &giis);CHKERRQ(ierr);
423       ierr = ISCreateGeneral(((PetscObject)pc)->comm, in, ioidx, PETSC_OWN_POINTER, &giois);CHKERRQ(ierr);
424       ierr = VecScatterCreate(y,giis,osm->gy,giois,&osm->girestriction);CHKERRQ(ierr);
425       ierr = VecDestroy(&y);CHKERRQ(ierr);
426       ierr = ISDestroy(&giis);CHKERRQ(ierr);
427       ierr = ISDestroy(&giois);CHKERRQ(ierr);
428     }
429     ierr = ISDestroy(&goid);CHKERRQ(ierr);
430     /* Create the subdomain work vectors. */
431     ierr = PetscMalloc(osm->n*sizeof(Vec),&osm->x);CHKERRQ(ierr);
432     ierr = PetscMalloc(osm->n*sizeof(Vec),&osm->y);CHKERRQ(ierr);
433     ierr = VecGetArray(osm->gx, &gxarray);CHKERRQ(ierr);
434     ierr = VecGetArray(osm->gy, &gyarray);CHKERRQ(ierr);
435     for (i=0, on=0; i<osm->n; ++i, on += oni) {
436       PetscInt oNi;
437       ierr = ISGetLocalSize(osm->ois[i],&oni);CHKERRQ(ierr);
438       ierr = ISGetSize(osm->ois[i],&oNi);CHKERRQ(ierr);
439       ierr = VecCreateMPIWithArray(((PetscObject)(osm->ois[i]))->comm,1,oni,oNi,gxarray+on,&osm->x[i]);CHKERRQ(ierr);
440       ierr = VecCreateMPIWithArray(((PetscObject)(osm->ois[i]))->comm,1,oni,oNi,gyarray+on,&osm->y[i]);CHKERRQ(ierr);
441     }
442     ierr = VecRestoreArray(osm->gx, &gxarray);CHKERRQ(ierr);
443     ierr = VecRestoreArray(osm->gy, &gyarray);CHKERRQ(ierr);
444     /* Create the local solvers */
445     ierr = PetscMalloc(osm->n*sizeof(KSP *),&osm->ksp);CHKERRQ(ierr);
446     for (i=0; i<osm->n; i++) { /* KSPs are local */
447       ierr = KSPCreate(((PetscObject)(osm->ois[i]))->comm,&ksp);CHKERRQ(ierr);
448       ierr = PetscLogObjectParent(pc,ksp);CHKERRQ(ierr);
449       ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
450       ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
451       ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
452       ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
453       ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
454       ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
455       osm->ksp[i] = ksp;
456     }
457     scall = MAT_INITIAL_MATRIX;
458 
459   }/*if (!pc->setupcalled)*/
460   else {
461     /*
462        Destroy the blocks from the previous iteration
463     */
464     if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
465       ierr = MatDestroyMatrices(osm->n,&osm->pmat);CHKERRQ(ierr);
466       scall = MAT_INITIAL_MATRIX;
467     }
468   }
469 
470   /*
471      Extract out the submatrices.
472   */
473   if (size > 1) {
474     ierr = MatGetSubMatricesParallel(pc->pmat,osm->n,osm->ois, osm->ois,scall,&osm->pmat);CHKERRQ(ierr);
475   }
476   else {
477     ierr = MatGetSubMatrices(pc->pmat,osm->n,osm->ois, osm->ois,scall,&osm->pmat);CHKERRQ(ierr);
478   }
479   if (scall == MAT_INITIAL_MATRIX) {
480     ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
481     for (i=0; i<osm->n; i++) {
482       ierr = PetscLogObjectParent(pc,osm->pmat[i]);CHKERRQ(ierr);
483       ierr = PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);CHKERRQ(ierr);
484     }
485   }
486 
487   /* Return control to the user so that the submatrices can be modified (e.g., to apply
488      different boundary conditions for the submatrices than for the global problem) */
489   ierr = PCModifySubMatrices(pc,osm->n,osm->ois,osm->ois,osm->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);
490 
491   /*
492      Loop over submatrices putting them into local ksps
493   */
494   for (i=0; i<osm->n; i++) {
495     ierr = KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i],pc->flag);CHKERRQ(ierr);
496     if (!pc->setupcalled) {
497       ierr = KSPSetFromOptions(osm->ksp[i]);CHKERRQ(ierr);
498     }
499   }
500 
501   PetscFunctionReturn(0);
502 }
503 
504 #undef __FUNCT__
505 #define __FUNCT__ "PCSetUpOnBlocks_GASM"
506 static PetscErrorCode PCSetUpOnBlocks_GASM(PC pc)
507 {
508   PC_GASM         *osm = (PC_GASM*)pc->data;
509   PetscErrorCode ierr;
510   PetscInt       i;
511 
512   PetscFunctionBegin;
513   for (i=0; i<osm->n; i++) {
514     ierr = KSPSetUp(osm->ksp[i]);CHKERRQ(ierr);
515   }
516   PetscFunctionReturn(0);
517 }
518 
519 #undef __FUNCT__
520 #define __FUNCT__ "PCApply_GASM"
521 static PetscErrorCode PCApply_GASM(PC pc,Vec x,Vec y)
522 {
523   PC_GASM         *osm = (PC_GASM*)pc->data;
524   PetscErrorCode ierr;
525   PetscInt       i;
526   ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;
527 
528   PetscFunctionBegin;
529   /*
530      Support for limiting the restriction or interpolation only to the inner
531      subdomain values (leaving the other values 0).
532   */
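  /*
     Loosely, in matrix terms this routine applies y = sum_i P_i K_i^{-1} R_i x (a sketch, not code
     from this file): R_i gathers x over the i-th outer subdomain when the PC_GASM_RESTRICT bit is
     set (otherwise only over the inner subdomain), K_i^{-1} is the subdomain solve below, and P_i
     adds the correction back over the outer subdomain when the PC_GASM_INTERPOLATE bit is set
     (otherwise only over the inner subdomain). The default PC_GASM_RESTRICT thus gathers residuals
     from the overlapping outer subdomains but injects corrections only on the inner ones.
  */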
533   if (!(osm->type & PC_GASM_RESTRICT)) {
534     /* have to zero the work RHS since scatter may leave some slots empty */
535     ierr = VecZeroEntries(osm->gx);CHKERRQ(ierr);
536     ierr = VecScatterBegin(osm->girestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
537   }
538   else {
539     ierr = VecScatterBegin(osm->gorestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
540   }
541   ierr = VecZeroEntries(osm->gy);CHKERRQ(ierr);
542   if (!(osm->type & PC_GASM_RESTRICT)) {
543     ierr = VecScatterEnd(osm->girestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
544   }
545   else {
546     ierr = VecScatterEnd(osm->gorestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
547   }
548   /* do the subdomain solves */
549   for (i=0; i<osm->n; ++i) {
550     ierr = KSPSolve(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
551   }
552   ierr = VecZeroEntries(y);CHKERRQ(ierr);
553   if (!(osm->type & PC_GASM_INTERPOLATE)) {
554     ierr = VecScatterBegin(osm->girestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
555     ierr = VecScatterEnd(osm->girestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);  PetscFunctionReturn(0);
556   }
557   else {
558     ierr = VecScatterBegin(osm->gorestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
559     ierr = VecScatterEnd(osm->gorestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);  PetscFunctionReturn(0);
560   }
561 }
562 
563 #undef __FUNCT__
564 #define __FUNCT__ "PCApplyTranspose_GASM"
565 static PetscErrorCode PCApplyTranspose_GASM(PC pc,Vec x,Vec y)
566 {
567   PC_GASM         *osm = (PC_GASM*)pc->data;
568   PetscErrorCode ierr;
569   PetscInt       i;
570   ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;
571 
572   PetscFunctionBegin;
573   /*
574      Support for limiting the restriction or interpolation to only local
575      subdomain values (leaving the other values 0).
576 
577      Note: these are reversed from the PCApply_GASM() because we are applying the
578      transpose of the three terms
579   */
580   if (!(osm->type & PC_GASM_INTERPOLATE)) {
581     /* have to zero the work RHS since scatter may leave some slots empty */
582     ierr = VecZeroEntries(osm->gx);CHKERRQ(ierr);
583     ierr = VecScatterBegin(osm->girestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
584   }
585   else {
586     ierr = VecScatterBegin(osm->gorestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
587   }
588   ierr = VecZeroEntries(osm->gy);CHKERRQ(ierr);
589   if (!(osm->type & PC_GASM_INTERPOLATE)) {
590     ierr = VecScatterEnd(osm->girestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
591   }
592   else {
593     ierr = VecScatterEnd(osm->gorestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
594   }
595   /* do the local solves */
596   for (i=0; i<osm->n; ++i) { /* Note that the solves are local, so we can go to osm->n, rather than osm->nmax. */
597     ierr = KSPSolveTranspose(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
598   }
599   ierr = VecZeroEntries(y);CHKERRQ(ierr);
600   if (!(osm->type & PC_GASM_RESTRICT)) {
601     ierr = VecScatterBegin(osm->girestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
602     ierr = VecScatterEnd(osm->girestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
603   }
604   else {
605     ierr = VecScatterBegin(osm->gorestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
606     ierr = VecScatterEnd(osm->gorestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
607   }
608 
609   PetscFunctionReturn(0);
610 }
611 
612 #undef __FUNCT__
613 #define __FUNCT__ "PCReset_GASM"
614 static PetscErrorCode PCReset_GASM(PC pc)
615 {
616   PC_GASM        *osm = (PC_GASM*)pc->data;
617   PetscErrorCode ierr;
618   PetscInt       i;
619 
620   PetscFunctionBegin;
621   if (osm->ksp) {
622     for (i=0; i<osm->n; i++) {
623       ierr = KSPReset(osm->ksp[i]);CHKERRQ(ierr);
624     }
625   }
626   if (osm->pmat) {
627     if (osm->n > 0) {
628       ierr = MatDestroyMatrices(osm->n,&osm->pmat);CHKERRQ(ierr);
629     }
630   }
631   if (osm->x) {
632     for (i=0; i<osm->n; i++) {
633       ierr = VecDestroy(&osm->x[i]);CHKERRQ(ierr);
634       ierr = VecDestroy(&osm->y[i]);CHKERRQ(ierr);
635     }
636   }
637   ierr = VecDestroy(&osm->gx);CHKERRQ(ierr);
638   ierr = VecDestroy(&osm->gy);CHKERRQ(ierr);
639 
640   ierr = VecScatterDestroy(&osm->gorestriction);CHKERRQ(ierr);
641   ierr = VecScatterDestroy(&osm->girestriction);CHKERRQ(ierr);
642   ierr = PCGASMDestroySubdomains(osm->n,osm->ois,osm->iis);CHKERRQ(ierr);
643   osm->ois = 0;
644   osm->iis = 0;
645   PetscFunctionReturn(0);
646 }
647 
648 #undef __FUNCT__
649 #define __FUNCT__ "PCDestroy_GASM"
650 static PetscErrorCode PCDestroy_GASM(PC pc)
651 {
652   PC_GASM         *osm = (PC_GASM*)pc->data;
653   PetscErrorCode ierr;
654   PetscInt       i;
655 
656   PetscFunctionBegin;
657   ierr = PCReset_GASM(pc);CHKERRQ(ierr);
658   if (osm->ksp) {
659     for (i=0; i<osm->n; i++) {
660       ierr = KSPDestroy(&osm->ksp[i]);CHKERRQ(ierr);
661     }
662     ierr = PetscFree(osm->ksp);CHKERRQ(ierr);
663   }
664   ierr = PetscFree(osm->x);CHKERRQ(ierr);
665   ierr = PetscFree(osm->y);CHKERRQ(ierr);
666   ierr = PetscFree(pc->data);CHKERRQ(ierr);
667   PetscFunctionReturn(0);
668 }
669 
670 #undef __FUNCT__
671 #define __FUNCT__ "PCSetFromOptions_GASM"
672 static PetscErrorCode PCSetFromOptions_GASM(PC pc)
673 {
674   PC_GASM         *osm = (PC_GASM*)pc->data;
675   PetscErrorCode ierr;
676   PetscInt       blocks,ovl;
677   PetscBool      symset,flg;
678   PCGASMType      gasmtype;
679 
680   PetscFunctionBegin;
681   /* set the type to symmetric if matrix is symmetric */
682   if (!osm->type_set && pc->pmat) {
683     ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
684     if (symset && flg) { osm->type = PC_GASM_BASIC; }
685   }
686   ierr = PetscOptionsHead("Generalized additive Schwarz options");CHKERRQ(ierr);
687     ierr = PetscOptionsInt("-pc_gasm_total_subdomains","Total number of subdomains across communicator","PCGASMSetTotalSubdomains",osm->n,&blocks,&flg);CHKERRQ(ierr);
688     osm->create_local = PETSC_TRUE;
689     ierr = PetscOptionsBool("-pc_gasm_subdomains_create_local","Whether to make autocreated subdomains local (true by default)","PCGASMSetTotalSubdomains",osm->create_local,&osm->create_local,&flg);CHKERRQ(ierr);
690     if (!osm->create_local) SETERRQ(((PetscObject)pc)->comm, PETSC_ERR_SUP, "No support for autocreation of nonlocal subdomains yet.");
691 
692     if (flg) {ierr = PCGASMSetTotalSubdomains(pc,blocks,osm->create_local);CHKERRQ(ierr); }
693     ierr = PetscOptionsInt("-pc_gasm_overlap","Number of overlapping degrees of freedom","PCGASMSetOverlap",osm->overlap,&ovl,&flg);CHKERRQ(ierr);
694     if (flg) {ierr = PCGASMSetOverlap(pc,ovl);CHKERRQ(ierr); }
695     flg  = PETSC_FALSE;
696     ierr = PetscOptionsEnum("-pc_gasm_type","Type of restriction/extension","PCGASMSetType",PCGASMTypes,(PetscEnum)osm->type,(PetscEnum*)&gasmtype,&flg);CHKERRQ(ierr);
697     if (flg) {ierr = PCGASMSetType(pc,gasmtype);CHKERRQ(ierr); }
698   ierr = PetscOptionsTail();CHKERRQ(ierr);
699   PetscFunctionReturn(0);
700 }
701 
702 /*------------------------------------------------------------------------------------*/
703 
704 EXTERN_C_BEGIN
705 #undef __FUNCT__
706 #define __FUNCT__ "PCGASMSetSubdomains_GASM"
707 PetscErrorCode  PCGASMSetSubdomains_GASM(PC pc,PetscInt n,IS iis[],IS ois[])
708 {
709   PC_GASM         *osm = (PC_GASM*)pc->data;
710   PetscErrorCode ierr;
711   PetscInt       i;
712 
713   PetscFunctionBegin;
714   if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 1 or more subdomains, n = %D",n);
715   if (pc->setupcalled && (n != osm->n || iis || ois)) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"PCGASMSetSubdomains() should be called before calling PCSetUp().");
716 
717   if (!pc->setupcalled) {
718     osm->n       = n;
719     osm->ois     = 0;
720     osm->iis     = 0;
721     if (ois) {
722       for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)ois[i]);CHKERRQ(ierr);}
723     }
724     if (iis) {
725       for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)iis[i]);CHKERRQ(ierr);}
726     }
727     ierr = PCGASMDestroySubdomains(osm->n,osm->iis,osm->ois);CHKERRQ(ierr);
728     if (ois) {
729       ierr = PetscMalloc(n*sizeof(IS),&osm->ois);CHKERRQ(ierr);
730       for (i=0; i<n; i++) { osm->ois[i] = ois[i]; }
731       /* Flag indicating that the user has set outer subdomains, so PCGASM should not increase their size. */
732       osm->overlap = -1;
733       if (!iis) {
734         ierr = PetscMalloc(n*sizeof(IS),&osm->iis);CHKERRQ(ierr);
735         for (i=0; i<n; i++) {
736           ierr = PetscObjectReference((PetscObject)ois[i]);CHKERRQ(ierr);
737           osm->iis[i] = ois[i];
738         }
739       }
740     }
741     if (iis) {
742       ierr = PetscMalloc(n*sizeof(IS),&osm->iis);CHKERRQ(ierr);
743       for (i=0; i<n; i++) { osm->iis[i] = iis[i]; }
744       if (!ois) {
745         ierr = PetscMalloc(n*sizeof(IS),&osm->ois);CHKERRQ(ierr);
746         for (i=0; i<n; i++) {
747           ierr = PetscObjectReference((PetscObject)iis[i]);CHKERRQ(ierr);
748           osm->ois[i] = iis[i];
749         }
752         if (osm->overlap > 0) {
753           /* Extend the "overlapping" regions by a number of steps */
754           ierr = MatIncreaseOverlap(pc->pmat,osm->n,osm->ois,osm->overlap);CHKERRQ(ierr);
755         }
756       }
757     }
758   }
759   PetscFunctionReturn(0);
760 }
761 EXTERN_C_END
762 
763 EXTERN_C_BEGIN
764 #undef __FUNCT__
765 #define __FUNCT__ "PCGASMSetTotalSubdomains_GASM"
766 PetscErrorCode  PCGASMSetTotalSubdomains_GASM(PC pc,PetscInt N, PetscBool create_local)
767 {
768   PC_GASM         *osm = (PC_GASM*)pc->data;
769   PetscErrorCode ierr;
770   PetscMPIInt    rank,size;
771   PetscInt       n;
772   PetscInt       Nmin, Nmax;
773 
774   PetscFunctionBegin;
775   if (!create_local) SETERRQ(((PetscObject)pc)->comm, PETSC_ERR_SUP, "No support for autocreation of nonlocal subdomains.");
776   if (N < 1) SETERRQ1(((PetscObject)pc)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Total number of subdomains must be > 0, N = %D",N);
777   ierr = MPI_Allreduce(&N,&Nmin,1,MPIU_INT,MPIU_MIN,((PetscObject)pc)->comm);CHKERRQ(ierr);
778   ierr = MPI_Allreduce(&N,&Nmax,1,MPIU_INT,MPIU_MAX,((PetscObject)pc)->comm);CHKERRQ(ierr);
779   if (Nmin != Nmax) SETERRQ2(((PetscObject)pc)->comm, PETSC_ERR_ARG_WRONG, "All processors must use the same number of subdomains.  min(N) = %D != %D = max(N)", Nmin, Nmax);
780 
781   osm->create_local = create_local;
782   /*
783      Split the subdomains equally among all processors
784   */
785   ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
786   ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
787   n = N/size + ((N % size) > rank);
788   if (!n) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Process %d must have at least one subdomain: total processors %d total blocks %D",(int)rank,(int)size,N);
789   if (pc->setupcalled && n != osm->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"PCGASMSetTotalSubdomains() should be called before PCSetUp().");
790   if (!pc->setupcalled) {
791     ierr = PCGASMDestroySubdomains(osm->n,osm->iis,osm->ois);CHKERRQ(ierr);
792     osm->N            = N;
793     osm->n            = n;
794     osm->nmax         = N/size + ((N%size)?1:0);
795     osm->ois           = 0;
796     osm->iis     = 0;
797   }
798   PetscFunctionReturn(0);
799 }
800 EXTERN_C_END
801 
802 EXTERN_C_BEGIN
803 #undef __FUNCT__
804 #define __FUNCT__ "PCGASMSetOverlap_GASM"
805 PetscErrorCode  PCGASMSetOverlap_GASM(PC pc,PetscInt ovl)
806 {
807   PC_GASM *osm = (PC_GASM*)pc->data;
808 
809   PetscFunctionBegin;
810   if (ovl < 0) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
811   if (pc->setupcalled && ovl != osm->overlap) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"PCGASMSetOverlap() should be called before PCSetUp().");
812   if (!pc->setupcalled) {
813     osm->overlap = ovl;
814   }
815   PetscFunctionReturn(0);
816 }
817 EXTERN_C_END
818 
819 EXTERN_C_BEGIN
820 #undef __FUNCT__
821 #define __FUNCT__ "PCGASMSetType_GASM"
822 PetscErrorCode  PCGASMSetType_GASM(PC pc,PCGASMType type)
823 {
824   PC_GASM *osm = (PC_GASM*)pc->data;
825 
826   PetscFunctionBegin;
827   osm->type     = type;
828   osm->type_set = PETSC_TRUE;
829   PetscFunctionReturn(0);
830 }
831 EXTERN_C_END
832 
833 EXTERN_C_BEGIN
834 #undef __FUNCT__
835 #define __FUNCT__ "PCGASMSetSortIndices_GASM"
836 PetscErrorCode  PCGASMSetSortIndices_GASM(PC pc,PetscBool  doSort)
837 {
838   PC_GASM *osm = (PC_GASM*)pc->data;
839 
840   PetscFunctionBegin;
841   osm->sort_indices = doSort;
842   PetscFunctionReturn(0);
843 }
844 EXTERN_C_END
845 
846 EXTERN_C_BEGIN
847 #undef __FUNCT__
848 #define __FUNCT__ "PCGASMGetSubKSP_GASM"
849 /*
850    FIX: This routine might need to be modified once multiple ranks per subdomain are allowed.
851         In particular, it would upset the global subdomain number calculation.
852 */
853 PetscErrorCode  PCGASMGetSubKSP_GASM(PC pc,PetscInt *n,PetscInt *first,KSP **ksp)
854 {
855   PC_GASM         *osm = (PC_GASM*)pc->data;
856   PetscErrorCode ierr;
857 
858   PetscFunctionBegin;
859   if (osm->n < 1) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ORDER,"Need to call PCSetUp() on PC (or KSPSetUp() on the outer KSP object) before calling here");
860 
861   if (n) {
862     *n = osm->n;
863   }
864   if (first) {
865     ierr = MPI_Scan(&osm->n,first,1,MPIU_INT,MPI_SUM,((PetscObject)pc)->comm);CHKERRQ(ierr);
866     *first -= osm->n;
867   }
868   if (ksp) {
869     /* Assume that local solves are now different; not necessarily
870        true, though!  This flag is used only for PCView_GASM() */
871     *ksp                   = osm->ksp;
872     osm->same_subdomain_solvers = PETSC_FALSE;
873   }
874   PetscFunctionReturn(0);
875 }/* PCGASMGetSubKSP_GASM() */
876 EXTERN_C_END
877 
878 
879 #undef __FUNCT__
880 #define __FUNCT__ "PCGASMSetSubdomains"
881 /*@C
882     PCGASMSetSubdomains - Sets the subdomains for this processor
883     for the additive Schwarz preconditioner.
884 
885     Collective on PC
886 
887     Input Parameters:
888 +   pc  - the preconditioner context
889 .   n   - the number of subdomains for this processor
890 .   iis - the index sets that define this processor's local inner subdomains
891          (or PETSC_NULL for PETSc to determine subdomains)
892 -   ois - the index sets that define this processor's local outer subdomains
893          (or PETSC_NULL to use the same as iis)
894 
895     Notes:
896     The IS indices use the parallel, global numbering of the vector entries.
897     Inner subdomains are those where the correction is applied.
898     Outer subdomains are those where the residual necessary to obtain the
899     corrections is obtained (see PCGASMType for the use of inner/outer subdomains).
900     Both inner and outer subdomains can extend over several processors.
901     This processor's portion of a subdomain is known as a local subdomain.
902 
903     By default the GASM preconditioner uses 1 (local) subdomain per processor.
904     Use PCGASMSetTotalSubdomains() to set the total number of subdomains across
905     all processors that PCGASM will create automatically, and to specify whether
906     they should be local or not.
907 
908 
909     Level: advanced
910 
911 .keywords: PC, GASM, set, subdomains, additive Schwarz
912 
913 .seealso: PCGASMSetTotalSubdomains(), PCGASMSetOverlap(), PCGASMGetSubKSP(),
914           PCGASMCreateSubdomains2D(), PCGASMGetSubdomains()
915 @*/
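/*
   A minimal usage sketch (not from this file; the index set construction is application-specific
   and merely assumed here): supply two inner subdomains per process and let PCGASM derive the
   outer subdomains by passing PETSC_NULL for ois together with a nonzero overlap.

     IS iis[2];
     ... create iis[0], iis[1] with the global indices of the two inner subdomains ...
     ierr = PCSetType(pc,PCGASM);CHKERRQ(ierr);
     ierr = PCGASMSetSubdomains(pc,2,iis,PETSC_NULL);CHKERRQ(ierr);
     ierr = PCGASMSetOverlap(pc,1);CHKERRQ(ierr);
*/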
916 PetscErrorCode  PCGASMSetSubdomains(PC pc,PetscInt n,IS iis[],IS ois[])
917 {
918   PetscErrorCode ierr;
919 
920   PetscFunctionBegin;
921   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
922   ierr = PetscTryMethod(pc,"PCGASMSetSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,n,iis,ois));CHKERRQ(ierr);
923   PetscFunctionReturn(0);
924 }
925 
926 #undef __FUNCT__
927 #define __FUNCT__ "PCGASMSetTotalSubdomains"
928 /*@C
929     PCGASMSetTotalSubdomains - Sets the total number of subdomains to use in the generalized additive
930     Schwarz preconditioner.  The number of subdomains is cumulative across all processors in pc's
931     communicator. Either all or no processors in the PC communicator must call this routine with
932     the same N.  The subdomains will be created automatically during PCSetUp().
933 
934     Collective on PC
935 
936     Input Parameters:
937 +   pc           - the preconditioner context
938 .   N            - the total number of subdomains cumulative across all processors
939 -   create_local - whether the subdomains to be created are to be local
940 
941     Options Database Key:
942     To set the total number of subdomains and let PCGASM autocreate them, rather than specify the index sets, use the following options:
943 +    -pc_gasm_total_subdomains <n>                  - sets the total number of subdomains to be autocreated by PCGASM
944 -    -pc_gasm_subdomains_create_local <true|false>  - whether autocreated subdomains should be local or not (default is true)
945 
946     By default the GASM preconditioner uses 1 subdomain per processor.
947 
948 
949     Use PCGASMSetSubdomains() to set subdomains explicitly or to set different numbers
950     of subdomains per processor.
951 
952     Level: advanced
953 
954 .keywords: PC, GASM, set, total, global, subdomains, additive Schwarz
955 
956 .seealso: PCGASMSetSubdomains(), PCGASMSetOverlap(), PCGASMGetSubKSP(),
957           PCGASMCreateSubdomains2D()
958 @*/
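/*
   A minimal usage sketch (assuming pc has already been given the PCGASM type): request 16
   automatically created local subdomains spread across the communicator,

     ierr = PCGASMSetTotalSubdomains(pc,16,PETSC_TRUE);CHKERRQ(ierr);

   or, equivalently, pass -pc_gasm_total_subdomains 16 on the command line.
*/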
959 PetscErrorCode  PCGASMSetTotalSubdomains(PC pc,PetscInt N, PetscBool create_local)
960 {
961   PetscErrorCode ierr;
962 
963   PetscFunctionBegin;
964   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
965   ierr = PetscTryMethod(pc,"PCGASMSetTotalSubdomains_C",(PC,PetscInt,PetscBool),(pc,N,create_local));CHKERRQ(ierr);
966   PetscFunctionReturn(0);
967 }
968 
969 #undef __FUNCT__
970 #define __FUNCT__ "PCGASMSetOverlap"
971 /*@
972     PCGASMSetOverlap - Sets the overlap between a pair of subdomains for the
973     additive Schwarz preconditioner.  Either all or no processors in the
974     PC communicator must call this routine.
975 
976     Logically Collective on PC
977 
978     Input Parameters:
979 +   pc  - the preconditioner context
980 -   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)
981 
982     Options Database Key:
983 .   -pc_gasm_overlap <overlap> - Sets overlap
984 
985     Notes:
986     By default the GASM preconditioner uses 1 subdomain per processor.  To use
987     multiple subdomains per processor, see PCGASMSetTotalSubdomains() or
988     PCGASMSetSubdomains() (and the option -pc_gasm_total_subdomains <n>).
989 
990     The overlap defaults to 1, so if one desires that no additional
991     overlap be computed beyond what may have been set with a call to
992     PCGASMSetTotalSubdomains() or PCGASMSetSubdomains(), then ovl
993     must be set to 0.  In particular, if one does not explicitly set
994     the subdomains in application code, then all overlap would be computed
995     internally by PETSc, and using an overlap of 0 would result in a GASM
996     variant that is equivalent to the block Jacobi preconditioner.
997 
998     Note that one can define initial index sets with any overlap via
999     PCGASMSetSubdomains(); the routine PCGASMSetOverlap() merely allows
1000     PETSc to extend that overlap further, if desired.
1001 
1002     Level: intermediate
1003 
1004 .keywords: PC, GASM, set, overlap
1005 
1006 .seealso: PCGASMSetTotalSubdomains(), PCGASMSetSubdomains(), PCGASMGetSubKSP(),
1007           PCGASMCreateSubdomains2D(), PCGASMGetSubdomains()
1008 @*/
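/*
   For example (a sketch, assuming the subdomains are created automatically as described in the
   Notes above), suppressing any additional overlap yields the block Jacobi-like variant:

     ierr = PCGASMSetOverlap(pc,0);CHKERRQ(ierr);

   or, equivalently, -pc_gasm_overlap 0 on the command line.
*/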
1009 PetscErrorCode  PCGASMSetOverlap(PC pc,PetscInt ovl)
1010 {
1011   PetscErrorCode ierr;
1012 
1013   PetscFunctionBegin;
1014   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1015   PetscValidLogicalCollectiveInt(pc,ovl,2);
1016   ierr = PetscTryMethod(pc,"PCGASMSetOverlap_C",(PC,PetscInt),(pc,ovl));CHKERRQ(ierr);
1017   PetscFunctionReturn(0);
1018 }
1019 
1020 #undef __FUNCT__
1021 #define __FUNCT__ "PCGASMSetType"
1022 /*@
1023     PCGASMSetType - Sets the type of restriction and interpolation used
1024     for local problems in the additive Schwarz method.
1025 
1026     Logically Collective on PC
1027 
1028     Input Parameters:
1029 +   pc  - the preconditioner context
1030 -   type - variant of GASM, one of
1031 .vb
1032       PC_GASM_BASIC       - full interpolation and restriction
1033       PC_GASM_RESTRICT    - full restriction, local processor interpolation
1034       PC_GASM_INTERPOLATE - full interpolation, local processor restriction
1035       PC_GASM_NONE        - local processor restriction and interpolation
1036 .ve
1037 
1038     Options Database Key:
1039 .   -pc_gasm_type [basic,restrict,interpolate,none] - Sets GASM type
1040 
1041     Level: intermediate
1042 
1043 .keywords: PC, GASM, set, type
1044 
1045 .seealso: PCGASMSetTotalSubdomains(), PCGASMSetSubdomains(), PCGASMGetSubKSP(),
1046           PCGASMCreateSubdomains2D()
1047 @*/
1048 PetscErrorCode  PCGASMSetType(PC pc,PCGASMType type)
1049 {
1050   PetscErrorCode ierr;
1051 
1052   PetscFunctionBegin;
1053   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1054   PetscValidLogicalCollectiveEnum(pc,type,2);
1055   ierr = PetscTryMethod(pc,"PCGASMSetType_C",(PC,PCGASMType),(pc,type));CHKERRQ(ierr);
1056   PetscFunctionReturn(0);
1057 }
1058 
1059 #undef __FUNCT__
1060 #define __FUNCT__ "PCGASMSetSortIndices"
1061 /*@
1062     PCGASMSetSortIndices - Determines whether subdomain indices are sorted.
1063 
1064     Logically Collective on PC
1065 
1066     Input Parameters:
1067 +   pc  - the preconditioner context
1068 -   doSort - sort the subdomain indices
1069 
1070     Level: intermediate
1071 
1072 .keywords: PC, GASM, set, type
1073 
1074 .seealso: PCGASMSetSubdomains(), PCGASMSetTotalSubdomains(), PCGASMGetSubKSP(),
1075           PCGASMCreateSubdomains2D()
1076 @*/
1077 PetscErrorCode  PCGASMSetSortIndices(PC pc,PetscBool  doSort)
1078 {
1079   PetscErrorCode ierr;
1080 
1081   PetscFunctionBegin;
1082   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1083   PetscValidLogicalCollectiveBool(pc,doSort,2);
1084   ierr = PetscTryMethod(pc,"PCGASMSetSortIndices_C",(PC,PetscBool),(pc,doSort));CHKERRQ(ierr);
1085   PetscFunctionReturn(0);
1086 }
1087 
1088 #undef __FUNCT__
1089 #define __FUNCT__ "PCGASMGetSubKSP"
1090 /*@C
1091    PCGASMGetSubKSP - Gets the local KSP contexts for all blocks on
1092    this processor.
1093 
1094    Collective on PC iff first_local is requested
1095 
1096    Input Parameter:
1097 .  pc - the preconditioner context
1098 
1099    Output Parameters:
1100 +  n_local - the number of blocks on this processor or PETSC_NULL
1101 .  first_local - the global number of the first block on this processor or PETSC_NULL,
1102                  all processors must request or all must pass PETSC_NULL
1103 -  ksp - the array of KSP contexts
1104 
1105    Note:
1106    After PCGASMGetSubKSP() the array of KSP contexts should not be freed
1107 
1108    Currently for some matrix implementations only 1 block per processor
1109    is supported.
1110 
1111    You must call KSPSetUp() before calling PCGASMGetSubKSP().
1112 
1113    Level: advanced
1114 
1115 .keywords: PC, GASM, additive Schwarz, get, sub, KSP, context
1116 
1117 .seealso: PCGASMSetTotalSubdomains(), PCGASMSetSubdomains(), PCGASMSetOverlap(),
1118           PCGASMCreateSubdomains2D(),
1119 @*/
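/*
   A usage sketch (assumptions: KSPSetUp() has already been called on the outer solver, ierr is
   declared, and each local block is to use an ILU preconditioner):

     PetscInt n_local,first,i;
     KSP      *subksp;
     PC       subpc;
     ierr = PCGASMGetSubKSP(pc,&n_local,&first,&subksp);CHKERRQ(ierr);
     for (i=0; i<n_local; i++) {
       ierr = KSPGetPC(subksp[i],&subpc);CHKERRQ(ierr);
       ierr = PCSetType(subpc,PCILU);CHKERRQ(ierr);
     }
*/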
1120 PetscErrorCode  PCGASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
1121 {
1122   PetscErrorCode ierr;
1123 
1124   PetscFunctionBegin;
1125   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1126   ierr = PetscUseMethod(pc,"PCGASMGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));CHKERRQ(ierr);
1127   PetscFunctionReturn(0);
1128 }
1129 
1130 /* -------------------------------------------------------------------------------------*/
1131 /*MC
1132    PCGASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
1133            its own KSP object.
1134 
1135    Options Database Keys:
1136 +  -pc_gasm_total_subdomains <n>  - Sets the total number of local subdomains (known as blocks) to be distributed among processors
1137 .  -pc_gasm_view_subdomains       - activates the printing of subdomain indices in PCView(), -ksp_view or -snes_view
1138 .  -pc_gasm_print_subdomains      - activates the printing of subdomain indices in PCSetUp()
1139 .  -pc_gasm_overlap <ovl>         - Sets overlap by which to (automatically) extend local subdomains
1140 -  -pc_gasm_type [basic,restrict,interpolate,none] - Sets GASM type
1141 
1142      IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
1143       will get a different convergence rate due to the default option of -pc_gasm_type restrict. Use
1144       -pc_gasm_type basic to use the standard GASM.
1145 
1146    Notes: Each processor can have one or more blocks, but a block cannot be shared by more
1147      than one processor. Defaults to one block per processor.
1148 
1149      To set options on the solvers for each block, append -sub_ to all the KSP and PC
1150         options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
1151 
1152      To set the options on the solvers separately for each block, call PCGASMGetSubKSP()
1153          and set the options directly on the resulting KSP object (you can access its PC
1154          with KSPGetPC())
1155 
1156 
1157    Level: beginner
1158 
1159    Concepts: additive Schwarz method
1160 
1161     References:
1162     An additive variant of the Schwarz alternating method for the case of many subregions
1163     M Dryja, OB Widlund - Courant Institute, New York University Technical report
1164 
1165     Domain Decompositions: Parallel Multilevel Methods for Elliptic Partial Differential Equations,
1166     Barry Smith, Petter Bjorstad, and William Gropp, Cambridge University Press, ISBN 0-521-49589-X.
1167 
1168 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
1169            PCBJACOBI, PCGASMSetUseTrueLocal(), PCGASMGetSubKSP(), PCGASMSetSubdomains(),
1170            PCGASMSetTotalSubdomains(), PCSetModifySubmatrices(), PCGASMSetOverlap(), PCGASMSetType()
1171 
1172 M*/
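/*
   For example (a sketch assembled from the options listed above), a run using the default
   restricted GASM with one layer of overlap and ILU(1) subdomain solves can be configured
   entirely from the command line:

     -pc_type gasm -pc_gasm_overlap 1 -sub_ksp_type preonly -sub_pc_type ilu -sub_pc_factor_levels 1
*/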
1173 
1174 EXTERN_C_BEGIN
1175 #undef __FUNCT__
1176 #define __FUNCT__ "PCCreate_GASM"
1177 PetscErrorCode  PCCreate_GASM(PC pc)
1178 {
1179   PetscErrorCode ierr;
1180   PC_GASM         *osm;
1181 
1182   PetscFunctionBegin;
1183   ierr = PetscNewLog(pc,PC_GASM,&osm);CHKERRQ(ierr);
1184   osm->N                 = PETSC_DECIDE;
1185   osm->n                 = PETSC_DECIDE;
1186   osm->nmax              = 0;
1187   osm->overlap           = 1;
1188   osm->ksp               = 0;
1189   osm->gorestriction     = 0;
1190   osm->girestriction     = 0;
1191   osm->gx                = 0;
1192   osm->gy                = 0;
1193   osm->x                 = 0;
1194   osm->y                 = 0;
1195   osm->ois               = 0;
1196   osm->iis               = 0;
1197   osm->pmat              = 0;
1198   osm->type              = PC_GASM_RESTRICT;
1199   osm->same_subdomain_solvers = PETSC_TRUE;
1200   osm->sort_indices           = PETSC_TRUE;
1201 
1202   pc->data                   = (void*)osm;
1203   pc->ops->apply             = PCApply_GASM;
1204   pc->ops->applytranspose    = PCApplyTranspose_GASM;
1205   pc->ops->setup             = PCSetUp_GASM;
1206   pc->ops->reset             = PCReset_GASM;
1207   pc->ops->destroy           = PCDestroy_GASM;
1208   pc->ops->setfromoptions    = PCSetFromOptions_GASM;
1209   pc->ops->setuponblocks     = PCSetUpOnBlocks_GASM;
1210   pc->ops->view              = PCView_GASM;
1211   pc->ops->applyrichardson   = 0;
1212 
1213   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMSetSubdomains_C","PCGASMSetSubdomains_GASM",
1214                     PCGASMSetSubdomains_GASM);CHKERRQ(ierr);
1215   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMSetTotalSubdomains_C","PCGASMSetTotalSubdomains_GASM",
1216                     PCGASMSetTotalSubdomains_GASM);CHKERRQ(ierr);
1217   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMSetOverlap_C","PCGASMSetOverlap_GASM",
1218                     PCGASMSetOverlap_GASM);CHKERRQ(ierr);
1219   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMSetType_C","PCGASMSetType_GASM",
1220                     PCGASMSetType_GASM);CHKERRQ(ierr);
1221   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMSetSortIndices_C","PCGASMSetSortIndices_GASM",
1222                     PCGASMSetSortIndices_GASM);CHKERRQ(ierr);
1223   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMGetSubKSP_C","PCGASMGetSubKSP_GASM",
1224                     PCGASMGetSubKSP_GASM);CHKERRQ(ierr);
1225   PetscFunctionReturn(0);
1226 }
1227 EXTERN_C_END
1228 
1229 
1230 #undef __FUNCT__
1231 #define __FUNCT__ "PCGASMCreateLocalSubdomains"
1232 /*@C
1233    PCGASMCreateLocalSubdomains - Creates n local index sets for the overlapping
1234    Schwarz preconditioner for any problem based on its matrix.
1235 
1236    Collective
1237 
1238    Input Parameters:
1239 +  A       - The global matrix operator
1240 .  overlap - amount of overlap in outer subdomains
1241 -  n       - the number of local subdomains
1242 
1243    Output Parameters:
1244 +  iis - the array of index sets defining the local inner subdomains (on which the correction is applied)
1245 -  ois - the array of index sets defining the local outer subdomains (on which the residual is computed)
1246 
1247    Level: advanced
1248 
1249    Note: this generates n nonoverlapping local inner subdomains on PETSC_COMM_SELF;
1250          PCGASM will generate the overlap from these if you use them in PCGASMSetSubdomains() and set a
1251          nonzero overlap with PCGASMSetOverlap()
1252 
1253     In the Fortran version you must provide the output array(s) already allocated with length n.
1254 
1255 .keywords: PC, GASM, additive Schwarz, create, subdomains, unstructured grid
1256 
1257 .seealso: PCGASMSetSubdomains(), PCGASMDestroySubdomains()
1258 @*/
1259 PetscErrorCode  PCGASMCreateLocalSubdomains(Mat A, PetscInt overlap, PetscInt n, IS* iis[], IS* ois[])
1260 {
1261   MatPartitioning           mpart;
1262   const char                *prefix;
1263   PetscErrorCode            (*f)(Mat,MatReuse,Mat*);
1264   PetscMPIInt               size;
1265   PetscInt                  i,j,rstart,rend,bs;
1266   PetscBool                 isbaij = PETSC_FALSE,foundpart = PETSC_FALSE;
1267   Mat                       Ad = PETSC_NULL, adj;
1268   IS                        ispart,isnumb,*is;
1269   PetscErrorCode            ierr;
1270 
1271   PetscFunctionBegin;
1272   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
1273   PetscValidPointer(iis,4);
1274   if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of local blocks must be > 0, n = %D",n);
1275 
1276   /* Get prefix, row distribution, and block size */
1277   ierr = MatGetOptionsPrefix(A,&prefix);CHKERRQ(ierr);
1278   ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
1279   ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
1280   if (rstart/bs*bs != rstart || rend/bs*bs != rend) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"bad row distribution [%D,%D) for matrix block size %D",rstart,rend,bs);
1281 
1282   /* Get diagonal block from matrix if possible */
1283   ierr = MPI_Comm_size(((PetscObject)A)->comm,&size);CHKERRQ(ierr);
1284   ierr = PetscObjectQueryFunction((PetscObject)A,"MatGetDiagonalBlock_C",(void (**)(void))&f);CHKERRQ(ierr);
1285   if (f) {
1286     ierr = MatGetDiagonalBlock(A,&Ad);CHKERRQ(ierr);
1287   } else if (size == 1) {
1288     Ad = A;
1289   }
1290   if (Ad) {
1291     ierr = PetscObjectTypeCompare((PetscObject)Ad,MATSEQBAIJ,&isbaij);CHKERRQ(ierr);
1292     if (!isbaij) {ierr = PetscObjectTypeCompare((PetscObject)Ad,MATSEQSBAIJ,&isbaij);CHKERRQ(ierr);}
1293   }
1294   if (Ad && n > 1) {
1295     PetscBool  match,done;
1296     /* Try to setup a good matrix partitioning if available */
1297     ierr = MatPartitioningCreate(PETSC_COMM_SELF,&mpart);CHKERRQ(ierr);
1298     ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
1299     ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr);
1300     ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGCURRENT,&match);CHKERRQ(ierr);
1301     if (!match) {
1302       ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGSQUARE,&match);CHKERRQ(ierr);
1303     }
1304     if (!match) { /* assume a "good" partitioner is available */
1305       PetscInt na;
1306       const PetscInt *ia,*ja;
1307       ierr = MatGetRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
1308       if (done) {
1309         /* Build adjacency matrix by hand. Unfortunately a call to
1310            MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
1311            remove the block-aij structure and we cannot expect
1312            MatPartitioning to split vertices as we need */
1313         PetscInt i,j,len,nnz,cnt,*iia=0,*jja=0;
1314         const PetscInt *row;
1315         nnz = 0;
1316         for (i=0; i<na; i++) { /* count number of nonzeros */
1317           len = ia[i+1] - ia[i];
1318           row = ja + ia[i];
1319           for (j=0; j<len; j++) {
1320             if (row[j] == i) { /* don't count diagonal */
1321               len--; break;
1322             }
1323           }
1324           nnz += len;
1325         }
1326         ierr = PetscMalloc((na+1)*sizeof(PetscInt),&iia);CHKERRQ(ierr);
1327         ierr = PetscMalloc((nnz)*sizeof(PetscInt),&jja);CHKERRQ(ierr);
1328         nnz    = 0;
1329         iia[0] = 0;
1330         for (i=0; i<na; i++) { /* fill adjacency */
1331           cnt = 0;
1332           len = ia[i+1] - ia[i];
1333           row = ja + ia[i];
1334           for (j=0; j<len; j++) {
1335             if (row[j] != i) { /* if not diagonal */
1336               jja[nnz+cnt++] = row[j];
1337             }
1338           }
1339           nnz += cnt;
1340           iia[i+1] = nnz;
1341         }
1342         /* Partitioning of the adjacency matrix */
1343         ierr = MatCreateMPIAdj(PETSC_COMM_SELF,na,na,iia,jja,PETSC_NULL,&adj);CHKERRQ(ierr);
1344         ierr = MatPartitioningSetAdjacency(mpart,adj);CHKERRQ(ierr);
1345         ierr = MatPartitioningSetNParts(mpart,n);CHKERRQ(ierr);
1346         ierr = MatPartitioningApply(mpart,&ispart);CHKERRQ(ierr);
1347         ierr = ISPartitioningToNumbering(ispart,&isnumb);CHKERRQ(ierr);
1348         ierr = MatDestroy(&adj);CHKERRQ(ierr);
1349         foundpart = PETSC_TRUE;
1350       }
1351       ierr = MatRestoreRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
1352     }
1353     ierr = MatPartitioningDestroy(&mpart);CHKERRQ(ierr);
1354   }
1355   ierr = PetscMalloc(n*sizeof(IS),&is);CHKERRQ(ierr);
1356   if (!foundpart) {
1357 
1358     /* Partitioning by contiguous chunks of rows */
1359 
1360     PetscInt mbs   = (rend-rstart)/bs;
1361     PetscInt start = rstart;
1362     for (i=0; i<n; i++) {
1363       PetscInt count = (mbs/n + ((mbs % n) > i)) * bs;
1364       ierr   = ISCreateStride(PETSC_COMM_SELF,count,start,1,&is[i]);CHKERRQ(ierr);
1365       start += count;
1366     }
1367 
1368   } else {
1369 
1370     /* Partitioning by adjacency of diagonal block  */
1371 
1372     const PetscInt *numbering;
1373     PetscInt       *count,nidx,*indices,*newidx,start=0;
1374     /* Get node count in each partition */
1375     ierr = PetscMalloc(n*sizeof(PetscInt),&count);CHKERRQ(ierr);
1376     ierr = ISPartitioningCount(ispart,n,count);CHKERRQ(ierr);
1377     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1378       for (i=0; i<n; i++) count[i] *= bs;
1379     }
1380     /* Build indices from node numbering */
1381     ierr = ISGetLocalSize(isnumb,&nidx);CHKERRQ(ierr);
1382     ierr = PetscMalloc(nidx*sizeof(PetscInt),&indices);CHKERRQ(ierr);
1383     for (i=0; i<nidx; i++) indices[i] = i; /* needs to be initialized */
1384     ierr = ISGetIndices(isnumb,&numbering);CHKERRQ(ierr);
1385     ierr = PetscSortIntWithPermutation(nidx,numbering,indices);CHKERRQ(ierr);
1386     ierr = ISRestoreIndices(isnumb,&numbering);CHKERRQ(ierr);
1387     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1388       ierr = PetscMalloc(nidx*bs*sizeof(PetscInt),&newidx);CHKERRQ(ierr);
1389       for (i=0; i<nidx; i++)
1390         for (j=0; j<bs; j++)
1391           newidx[i*bs+j] = indices[i]*bs + j;
1392       ierr = PetscFree(indices);CHKERRQ(ierr);
1393       nidx   *= bs;
1394       indices = newidx;
1395     }
1396     /* Shift to get global indices */
1397     for (i=0; i<nidx; i++) indices[i] += rstart;
1398 
1399     /* Build the index sets for each block */
1400     for (i=0; i<n; i++) {
1401       ierr   = ISCreateGeneral(PETSC_COMM_SELF,count[i],&indices[start],PETSC_COPY_VALUES,&is[i]);CHKERRQ(ierr);
1402       ierr   = ISSort(is[i]);CHKERRQ(ierr);
1403       start += count[i];
1404     }
1405 
1406     ierr = PetscFree(count);CHKERRQ(ierr);
1407     ierr = PetscFree(indices);CHKERRQ(ierr);
1408     ierr = ISDestroy(&isnumb);CHKERRQ(ierr);
1409     ierr = ISDestroy(&ispart);CHKERRQ(ierr);
1410   }
1411   *iis = is;
1412   if (!ois) PetscFunctionReturn(0);
1413   /*
1414    Initially make outer subdomains the same as inner subdomains. If nonzero additional overlap
1415    has been requested, copy the inner subdomains over so they can be modified.
1416    */
1417   ierr = PetscMalloc(n*sizeof(IS),ois);CHKERRQ(ierr);
1418   for (i=0; i<n; ++i) {
1419     if (overlap > 0) { /* With positive overlap, (*iis)[i] will be modified */
1420       ierr = ISDuplicate((*iis)[i],(*ois)+i);CHKERRQ(ierr);
1421       ierr = ISCopy((*iis)[i],(*ois)[i]);CHKERRQ(ierr);
1422     } else {
1423       ierr = PetscObjectReference((PetscObject)(*iis)[i]);CHKERRQ(ierr);
1424       (*ois)[i] = (*iis)[i];
1425     }
1426   }
1427   if (overlap > 0) {
1428     /* Extend the "overlapping" regions by a number of steps */
1429     ierr = MatIncreaseOverlap(A,n,*ois,overlap);CHKERRQ(ierr);
1430   }
1431   PetscFunctionReturn(0);
1432 }
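
/*
   A sketch of the typical calling sequence around PCGASMCreateLocalSubdomains() (assumes a PC pc of
   type PCGASM and a Mat A; the argument order shown for PCGASMSetSubdomains() -- inner subdomains
   first, then outer -- is an assumption to be checked against that routine's manual page; error
   checking abbreviated):

     PetscInt n = 4;
     IS       *iis,*ois;
     PCGASMCreateLocalSubdomains(A,1,n,&iis,&ois);
     PCGASMSetSubdomains(pc,n,iis,ois);
     ... set up and solve with the enclosing KSP ...
     PCGASMDestroySubdomains(n,iis,ois);
*/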
1433 
1434 #undef __FUNCT__
1435 #define __FUNCT__ "PCGASMDestroySubdomains"
1436 /*@C
1437    PCGASMDestroySubdomains - Destroys the index sets created with
1438    PCGASMCreateLocalSubdomains() or PCGASMCreateSubdomains2D(). Should be
1439    called after setting subdomains with PCGASMSetSubdomains().
1440 
1441    Collective
1442 
1443    Input Parameters:
1444 +  n   - the number of index sets
1445 .  iis - the array of inner subdomains
1446 -  ois - the array of outer subdomains, can be PETSC_NULL
1447 
1448    Level: intermediate
1449 
1450    Notes: this is merely a convenience subroutine that walks each list,
1451    destroys each IS on the list, and then frees the list.
1452 
1453 .keywords: PC, GASM, additive Schwarz, create, subdomains, unstructured grid
1454 
1455 .seealso: PCGASMCreateLocalSubdomains(), PCGASMSetSubdomains()
1456 @*/
1457 PetscErrorCode  PCGASMDestroySubdomains(PetscInt n, IS iis[], IS ois[])
1458 {
1459   PetscInt       i;
1460   PetscErrorCode ierr;
1461 
1462   PetscFunctionBegin;
1463   if (n <= 0) PetscFunctionReturn(0);
1464   if (iis) {
1465     PetscValidPointer(iis,2);
1466     for (i=0; i<n; i++) {
1467       ierr = ISDestroy(&iis[i]);CHKERRQ(ierr);
1468     }
1469     ierr = PetscFree(iis);CHKERRQ(ierr);
1470   }
1471   if (ois) {
1472     for (i=0; i<n; i++) {
1473       ierr = ISDestroy(&ois[i]);CHKERRQ(ierr);
1474     }
1475     ierr = PetscFree(ois);CHKERRQ(ierr);
1476   }
1477   PetscFunctionReturn(0);
1478 }
1479 
1480 
1481 #define PCGASMLocalSubdomainBounds2D(M,N,xleft,ylow,xright,yhigh,first,last,xleft_loc,ylow_loc,xright_loc,yhigh_loc,n) \
1482 {                                                                                                       \
1483  PetscInt first_row = first/M, last_row = last/M+1;                                                     \
1484   /*                                                                                                    \
1485    Compute ylow_loc and yhigh_loc so that (ylow_loc,xleft) and (yhigh_loc,xright) are the corners       \
1486    of the bounding box of the intersection of the subdomain with the local ownership range (local       \
1487    subdomain).                                                                                          \
1488    Also compute xleft_loc and xright_loc as the lower and upper bounds on the first and last rows       \
1489    of the intersection.                                                                                 \
1490   */                                                                                                    \
1491   /* ylow_loc is the grid row containing the first element of the local subdomain */                    \
1492   *ylow_loc = PetscMax(first_row,ylow);                                                                    \
1493   /* xleft_loc is the offset of the first element of the local subdomain within its grid row (might actually be outside the local subdomain) */ \
1494   *xleft_loc = *ylow_loc==first_row?PetscMax(first%M,xleft):xleft;                                                                            \
1495   /* yhigh_loc is the grid row above the last local subdomain element */                                                                    \
1496   *yhigh_loc = PetscMin(last_row,yhigh);                                                                                                     \
1497   /* xright_loc is the offset of the end of the local subdomain within its grid row (might actually be outside the local subdomain) */       \
1498   *xright_loc = *yhigh_loc==last_row?PetscMin(xright,last%M):xright;                                                                          \
1499   /* Now compute the size of the local subdomain n. */ \
1500   *n = 0;                                               \
1501   if (*ylow_loc < *yhigh_loc) {                           \
1502     PetscInt width = xright-xleft;                     \
1503     *n += width*(*yhigh_loc-*ylow_loc-1);                 \
1504     *n += PetscMin(PetscMax(*xright_loc-xleft,0),width); \
1505     *n -= PetscMin(PetscMax(*xleft_loc-xleft,0), width); \
1506   }\
1507 }
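
/*
   A hand-worked instance of the PCGASMLocalSubdomainBounds2D macro under assumed values: M = 10, a
   subdomain with xleft = 2, xright = 6, ylow = 3, yhigh = 8, and a local ownership range
   [first,last) = [35,62).  Then first_row = 3 and last_row = 7, so

     ylow_loc   = max(3,3)      = 3
     xleft_loc  = max(35%10,2)  = 5    (since ylow_loc == first_row)
     yhigh_loc  = min(7,8)      = 7
     xright_loc = min(6,62%10)  = 2    (since yhigh_loc == last_row)

   and with width = xright - xleft = 4 the local size is
     n = 4*(7-3-1) + min(max(2-2,0),4) - min(max(5-2,0),4) = 12 + 0 - 3 = 9,
   which agrees with a direct count of the subdomain indices falling in [35,62).
*/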
1508 
1509 
1510 
1511 #undef __FUNCT__
1512 #define __FUNCT__ "PCGASMCreateSubdomains2D"
1513 /*@
1514    PCGASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
1515    preconditioner for a two-dimensional problem on a regular grid.
1516 
1517    Collective
1518 
1519    Input Parameters:
1520 +  M, N               - the global number of grid points in the x and y directions
1521 .  Mdomains, Ndomains - the global number of subdomains in the x and y directions
1522 .  dof                - degrees of freedom per node
1523 -  overlap            - overlap in mesh lines
1524 
1525    Output Parameters:
1526 +  Nsub - the number of local subdomains created
1527 .  iis  - array of index sets defining inner (nonoverlapping) subdomains
1528 -  ois  - array of index sets defining outer (overlapping, if overlap > 0) subdomains
1529 
1530 
1531    Level: advanced
1532 
1533 .keywords: PC, GASM, additive Schwarz, create, subdomains, 2D, regular grid
1534 
1535 .seealso: PCGASMSetTotalSubdomains(), PCGASMSetSubdomains(), PCGASMGetSubKSP(),
1536           PCGASMSetOverlap()
1537 @*/
1538 PetscErrorCode  PCGASMCreateSubdomains2D(PC pc, PetscInt M,PetscInt N,PetscInt Mdomains,PetscInt Ndomains,PetscInt dof,PetscInt overlap, PetscInt *nsub,IS **iis,IS **ois)
1539 {
1540   PetscErrorCode ierr;
1541   PetscMPIInt    size, rank;
1542   PetscInt       i, j;
1543   PetscInt       maxheight, maxwidth;
1544   PetscInt       xstart, xleft, xright, xleft_loc, xright_loc;
1545   PetscInt       ystart, ylow,  yhigh,  ylow_loc,  yhigh_loc;
1546   PetscInt       x[2][2], y[2][2], n[2];
1547   PetscInt       first, last;
1548   PetscInt       nidx, *idx;
1549   PetscInt       ii,jj,s,q,d;
1550   PetscInt       k,kk;
1551   PetscMPIInt    color;
1552   MPI_Comm       comm, subcomm;
1553   IS             **xis = 0, **is = ois, **is_local = iis;
1554 
1555   PetscFunctionBegin;
1556   ierr = PetscObjectGetComm((PetscObject)pc, &comm);CHKERRQ(ierr);
1557   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1558   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1559   ierr = MatGetOwnershipRange(pc->pmat, &first, &last);CHKERRQ(ierr);
1560   if (first%dof || last%dof) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Matrix row partitioning unsuitable for domain decomposition: local row range (%D,%D) "
1561              "does not respect the number of degrees of freedom per grid point %D", first, last, dof);
1562 
1563   /* Determine the number of domains with nonzero intersections with the local ownership range. */
1564   s = 0;
1565   ystart = 0;
1566   for (j=0; j<Ndomains; ++j) {
1567     maxheight = N/Ndomains + ((N % Ndomains) > j); /* Maximal height of subdomain */
1568     if (maxheight < 2) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many %D subdomains in the vertical direction for mesh height %D", Ndomains, N);
1569     /* Vertical domain limits with an overlap. */
1570     ylow = PetscMax(ystart - overlap,0);
1571     yhigh = PetscMin(ystart + maxheight + overlap,N);
1572     xstart = 0;
1573     for (i=0; i<Mdomains; ++i) {
1574       maxwidth = M/Mdomains + ((M % Mdomains) > i); /* Maximal width of subdomain */
1575       if (maxwidth < 2) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many %D subdomains in the horizontal direction for mesh width %D", Mdomains, M);
1576       /* Horizontal domain limits with an overlap. */
1577       xleft   = PetscMax(xstart - overlap,0);
1578       xright  = PetscMin(xstart + maxwidth + overlap,M);
1579       /*
1580          Determine whether this subdomain intersects this processor's ownership range of pc->pmat.
1581       */
1582       PCGASMLocalSubdomainBounds2D(M,N,xleft,ylow,xright,yhigh,first,last,(&xleft_loc),(&ylow_loc),(&xright_loc),(&yhigh_loc),(&nidx));
1583       if (nidx) {
1584         ++s;
1585       }
1586       xstart += maxwidth;
1587     }/* for (i = 0; i < Mdomains; ++i) */
1588     ystart += maxheight;
1589   }/* for (j = 0; j < Ndomains; ++j) */
1590   /* Now we can allocate the necessary number of ISs. */
1591   *nsub = s;
1592   ierr = PetscMalloc((*nsub)*sizeof(IS*),is);CHKERRQ(ierr);
1593   ierr = PetscMalloc((*nsub)*sizeof(IS*),is_local);CHKERRQ(ierr);
1594   s = 0;
1595   ystart = 0;
1596   for (j=0; j<Ndomains; ++j) {
1597     maxheight = N/Ndomains + ((N % Ndomains) > j); /* Maximal height of subdomain */
1598     if (maxheight < 2) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many %D subdomains in the vertical direction for mesh height %D", Ndomains, N);
1599     /* Vertical domain limits with an overlap. */
1600     y[0][0] = PetscMax(ystart - overlap,0);
1601     y[0][1] = PetscMin(ystart + maxheight + overlap,N);
1602     /* Vertical domain limits without an overlap. */
1603     y[1][0] = ystart;
1604     y[1][1] = PetscMin(ystart + maxheight,N);
1605     xstart = 0;
1606     for (i=0; i<Mdomains; ++i) {
1607       maxwidth = M/Mdomains + ((M % Mdomains) > i); /* Maximal width of subdomain */
1608       if (maxwidth < 2) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many %D subdomains in the horizontal direction for mesh width %D", Mdomains, M);
1609       /* Horizontal domain limits with an overlap. */
1610       x[0][0]  = PetscMax(xstart - overlap,0);
1611       x[0][1]  = PetscMin(xstart + maxwidth + overlap,M);
1612       /* Horizontal domain limits without an overlap. */
1613       x[1][0] = xstart;
1614       x[1][1] = PetscMin(xstart+maxwidth,M);
1615       /*
1616          Determine whether this domain intersects this processor's ownership range of pc->pmat.
1617          Do this twice: first for the domains with overlaps, and once without.
1618          During the first pass create the subcommunicators, and use them on the second pass as well.
1619       */
1620       for (q = 0; q < 2; ++q) {
1621         /*
1622           Domain limits (xleft, xright) and (ylow, yhigh) are adjusted
1623           according to whether the domain with or without an overlap is considered.
1624         */
1625         xleft = x[q][0]; xright = x[q][1];
1626         ylow  = y[q][0]; yhigh  = y[q][1];
1627         PCGASMLocalSubdomainBounds2D(M,N,xleft,ylow,xright,yhigh,first,last,(&xleft_loc),(&ylow_loc),(&xright_loc),(&yhigh_loc),(&nidx));
1628         nidx *= dof;
1629         n[q] = nidx;
1630         /*
1631          Based on the counted number of indices in the local domain *with an overlap*,
1632          construct a subcommunicator of all the processors supporting this domain.
1633          Observe that a domain with an overlap might have nontrivial local support,
1634          while the domain without an overlap might not.  Hence, the decision to participate
1635          in the subcommunicator must be based on the domain with an overlap.
1636          */
1637         if (q == 0) {
1638           if (nidx) {
1639             color = 1;
1640           } else {
1641             color = MPI_UNDEFINED;
1642           }
1643           ierr = MPI_Comm_split(comm, color, rank, &subcomm);CHKERRQ(ierr);
1644         }
1645         /*
1646          Proceed only if the number of local indices *with an overlap* is nonzero.
1647          */
1648         if (n[0]) {
1649           if (q == 0) {
1650             xis = is;
1651           }
1652           if (q == 1) {
1653             /*
1654              The IS for the no-overlap subdomain shares a communicator with the overlapping domain.
1655              Moreover, if the overlap is zero, the two ISs are identical.
1656              */
1657             if (overlap == 0) {
1658               (*is_local)[s] = (*is)[s];
1659               ierr = PetscObjectReference((PetscObject)(*is)[s]);CHKERRQ(ierr);
1660               continue;
1661             } else {
1662               xis = is_local;
1663               subcomm = ((PetscObject)(*is)[s])->comm;
1664             }
1665           }/* if (q == 1) */
1666           idx = PETSC_NULL;
1667           ierr = PetscMalloc(nidx*sizeof(PetscInt),&idx);CHKERRQ(ierr);
1668           if (nidx) {
1669             k    = 0;
1670             for (jj=ylow_loc; jj<yhigh_loc; ++jj) {
1671               PetscInt x0 = (jj==ylow_loc)?xleft_loc:xleft;
1672               PetscInt x1 = (jj==yhigh_loc-1)?xright_loc:xright;
1673               kk = dof*(M*jj + x0);
1674               for (ii=x0; ii<x1; ++ii) {
1675                 for (d = 0; d < dof; ++d) {
1676                   idx[k++] = kk++;
1677                 }
1678               }
1679             }
1680           }
1681           ierr = ISCreateGeneral(subcomm,nidx,idx,PETSC_OWN_POINTER,(*xis)+s);CHKERRQ(ierr);
1682         }/* if (n[0]) */
1683       }/* for (q = 0; q < 2; ++q) */
1684       if (n[0]) {
1685         ++s;
1686       }
1687       xstart += maxwidth;
1688     }/* for (i = 0; i < Mdomains; ++i) */
1689     ystart += maxheight;
1690   }/* for (j = 0; j < Ndomains; ++j) */
1691   PetscFunctionReturn(0);
1692 }
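
/*
   A sketch of PCGASMCreateSubdomains2D() for a 64x64 grid with one degree of freedom per node,
   decomposed into a 4x4 array of subdomains with overlap 1 (assumes a PC pc of type PCGASM whose
   pc->pmat has a compatible row distribution; the argument order shown for PCGASMSetSubdomains()
   is an assumption to be checked against that routine's manual page; error checking abbreviated):

     PetscInt nsub;
     IS       *iis,*ois;
     PCGASMCreateSubdomains2D(pc,64,64,4,4,1,1,&nsub,&iis,&ois);
     PCGASMSetSubdomains(pc,nsub,iis,ois);
     ... set up and solve with the enclosing KSP ...
     PCGASMDestroySubdomains(nsub,iis,ois);
*/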
1693 
1694 #undef __FUNCT__
1695 #define __FUNCT__ "PCGASMGetSubdomains"
1696 /*@C
1697     PCGASMGetSubdomains - Gets the subdomains supported on this processor
1698     for the additive Schwarz preconditioner.
1699 
1700     Not Collective
1701 
1702     Input Parameter:
1703 .   pc - the preconditioner context
1704 
1705     Output Parameters:
1706 +   n   - the number of subdomains for this processor (default value = 1)
1707 .   iis - the index sets that define the inner subdomains (without overlap) supported on this processor (can be PETSC_NULL)
1708 -   ois - the index sets that define the outer subdomains (with overlap) supported on this processor (can be PETSC_NULL)
1709 
1710 
1711     Notes:
1712     The user is responsible for destroying the ISs and freeing the returned arrays.
1713     The IS numbering is in the parallel, global numbering of the vector.
1714 
1715     Level: advanced
1716 
1717 .keywords: PC, GASM, get, subdomains, additive Schwarz
1718 
1719 .seealso: PCGASMSetTotalSubdomains(), PCGASMSetOverlap(), PCGASMGetSubKSP(),
1720           PCGASMCreateSubdomains2D(), PCGASMSetSubdomains(), PCGASMGetSubmatrices()
1721 @*/
1722 PetscErrorCode  PCGASMGetSubdomains(PC pc,PetscInt *n,IS *iis[],IS *ois[])
1723 {
1724   PC_GASM         *osm;
1725   PetscErrorCode ierr;
1726   PetscBool      match;
1727   PetscInt       i;
1728 
1729   PetscFunctionBegin;
1730   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1731   ierr = PetscObjectTypeCompare((PetscObject)pc,PCGASM,&match);CHKERRQ(ierr);
1732   if (!match) SETERRQ2(((PetscObject)pc)->comm, PETSC_ERR_ARG_WRONG, "Incorrect object type: expected %s, got %s instead", PCGASM, ((PetscObject)pc)->type_name);
1733   osm = (PC_GASM*)pc->data;
1734   if (n)  *n  = osm->n;
1735   if (iis) {
1736     ierr = PetscMalloc(osm->n*sizeof(IS), iis);CHKERRQ(ierr);
1737   }
1738   if (ois) {
1739     ierr = PetscMalloc(osm->n*sizeof(IS), ois);CHKERRQ(ierr);
1740   }
1741   if (iis || ois) {
1742     for (i = 0; i < osm->n; ++i) {
1743       if (iis) (*iis)[i] = osm->iis[i];
1744       if (ois) (*ois)[i] = osm->ois[i];
1745     }
1746   }
1747   PetscFunctionReturn(0);
1748 }
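
/*
   A brief sketch of querying the subdomains after PCSetUp() (assumes a PC pc of type PCGASM;
   ownership of the ISs themselves is handled as described in the manual page above; error
   checking abbreviated):

     PetscInt n,i,len;
     IS       *iis,*ois;
     PCGASMGetSubdomains(pc,&n,&iis,&ois);
     for (i=0; i<n; ++i) {
       ISGetLocalSize(iis[i],&len);
       PetscPrintf(PETSC_COMM_SELF,"subdomain %D: %D local inner indices\n",i,len);
     }
     PetscFree(iis);
     PetscFree(ois);
*/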
1749 
1750 #undef __FUNCT__
1751 #define __FUNCT__ "PCGASMGetSubmatrices"
1752 /*@C
1753     PCGASMGetSubmatrices - Gets the local submatrices (for this processor
1754     only) for the additive Schwarz preconditioner.
1755 
1756     Not Collective
1757 
1758     Input Parameter:
1759 .   pc - the preconditioner context
1760 
1761     Output Parameters:
1762 +   n   - the number of matrices for this processor (default value = 1)
1763 -   mat - the matrices
1764 
1765     Notes: matrices returned by this routine have the same communicators as the index sets (IS)
1766            used to define subdomains in PCGASMSetSubdomains(), or PETSC_COMM_SELF, if the
1767            subdomains were defined using PCGASMSetTotalSubdomains().
1768     Level: advanced
1769 
1770 .keywords: PC, GASM, set, local, subdomains, additive Schwarz, block Jacobi
1771 
1772 .seealso: PCGASMSetTotalSubdomains(), PCGASMSetOverlap(), PCGASMGetSubKSP(),
1773           PCGASMCreateSubdomains2D(), PCGASMSetSubdomains(), PCGASMGetSubdomains()
1774 @*/
1775 PetscErrorCode  PCGASMGetSubmatrices(PC pc,PetscInt *n,Mat *mat[])
1776 {
1777   PC_GASM         *osm;
1778   PetscErrorCode ierr;
1779   PetscBool      match;
1780 
1781   PetscFunctionBegin;
1782   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1783   PetscValidIntPointer(n,2);
1784   if (mat) PetscValidPointer(mat,3);
1785   if (!pc->setupcalled) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUp() or PCSetUp().");
1786   ierr = PetscObjectTypeCompare((PetscObject)pc,PCGASM,&match);CHKERRQ(ierr);
1787   if (!match) SETERRQ2(((PetscObject)pc)->comm, PETSC_ERR_ARG_WRONG, "Expected %s, got %s instead", PCGASM, ((PetscObject)pc)->type_name);
1788   osm = (PC_GASM*)pc->data;
1789   if (n)   *n   = osm->n;
1790   if (mat) *mat = osm->pmat;
1791 
1792   PetscFunctionReturn(0);
1793 }
1794
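
/*
   A brief sketch of retrieving the subdomain matrices after PCSetUp() (assumes a PC pc of type
   PCGASM; error checking abbreviated):

     PetscInt n,i,m,nc;
     Mat      *submat;
     PCGASMGetSubmatrices(pc,&n,&submat);
     for (i=0; i<n; ++i) {
       MatGetSize(submat[i],&m,&nc);
       PetscPrintf(PETSC_COMM_SELF,"subdomain matrix %D: %D x %D\n",i,m,nc);
     }
*/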