#define PETSCKSP_DLL

/*
  This file defines a "generalized" additive Schwarz preconditioner for any Mat implementation.
  In this version each processor may have any number of subdomains and a subdomain may live on multiple
  processors.

       N    - total number of true subdomains on all processors
       n    - actual number of subdomains on this processor
       nmax - maximum number of subdomains per processor
*/
#include "private/pcimpl.h"     /*I "petscpc.h" I*/

typedef struct {
  PetscInt   N,n,nmax;
  PetscInt   overlap;             /* overlap requested by user */
  KSP        *ksp;                /* linear solvers for each block */
  Vec        gx,gy;               /* merged work vectors */
  Vec        *x,*y;               /* split work vectors; storage aliases pieces of storage of the above merged vectors */
  IS         gis, gis_local;      /* merged ISs */
  VecScatter grestriction;        /* merged restriction */
  VecScatter gprolongation;       /* merged prolongation */
  IS         *is;                 /* index sets that define each overlapping subdomain */
  IS         *is_local;           /* index sets that define each local subdomain (same as the subdomain with the overlap removed); may be NULL */
  Mat        *pmat;               /* subdomain block matrices */
  PCGASMType type;                /* use reduced interpolation, restriction or both */
  PetscBool  type_set;            /* if user set this value (so won't change it for symmetric problems) */
  PetscBool  same_local_solves;   /* flag indicating whether all local solvers are the same */
  PetscBool  sort_indices;        /* flag to sort subdomain indices */
} PC_GASM;

#undef __FUNCT__
#define __FUNCT__ "PCView_GASM"
static PetscErrorCode PCView_GASM(PC pc,PetscViewer viewer)
{
  PC_GASM        *osm = (PC_GASM*)pc->data;
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  PetscInt       i,bsz;
  PetscBool      iascii,isstring;
  PetscViewer    sviewer;

  PetscFunctionBegin;
  /* rank is needed by the synchronized prints below, so obtain it before any branch uses it */
  ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    char overlaps[256] = "user-defined overlap",blocks[256] = "total subdomain blocks not yet set";
    if (osm->overlap >= 0) {ierr = PetscSNPrintf(overlaps,sizeof overlaps,"amount of overlap = %D",osm->overlap);CHKERRQ(ierr);}
    if (osm->nmax > 0)     {ierr = PetscSNPrintf(blocks,sizeof blocks,"max number of subdomain blocks = %D",osm->nmax);CHKERRQ(ierr);}
    ierr = PetscViewerASCIISynchronizedPrintf(viewer,"  [%d] number of local blocks = %D\n",(int)rank,osm->n);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer,"  Generalized additive Schwarz: %s, %s\n",blocks,overlaps);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer,"  Generalized additive Schwarz: restriction/interpolation type - %s\n",PCGASMTypes[osm->type]);CHKERRQ(ierr);
    if (osm->same_local_solves) {
      if (osm->ksp) {
        ierr = PetscViewerASCIIPrintf(viewer,"  Local solve is same for all blocks, in the following KSP and PC objects:\n");CHKERRQ(ierr);
        ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
        if (!rank) {
          ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
          ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);
          ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
        }
        ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
      }
    } else {
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"  Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
      for (i=0; i<osm->nmax; i++) {
        ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
        if (i < osm->n) {
          ierr = ISGetLocalSize(osm->is[i],&bsz);CHKERRQ(ierr);
          ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"[%d] local block number %D, size = %D\n",(int)rank,i,bsz);CHKERRQ(ierr);
          ierr = KSPView(osm->ksp[i],sviewer);CHKERRQ(ierr);
          ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
        }
        ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
      }
      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    ierr = PetscViewerStringSPrintf(viewer," blocks=%D, overlap=%D, type=%s",osm->n,osm->overlap,PCGASMTypes[osm->type]);CHKERRQ(ierr);
    ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
    if (osm->ksp) {ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);}
    ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
  } else {
    SETERRQ1(((PetscObject)pc)->comm,PETSC_ERR_SUP,"Viewer type %s not supported for PCGASM",((PetscObject)viewer)->type_name);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGASMPrintSubdomains"
static PetscErrorCode PCGASMPrintSubdomains(PC pc)
{
  PC_GASM        *osm = (PC_GASM*)pc->data;
  const char     *prefix;
  char           fname[PETSC_MAX_PATH_LEN+1];
  PetscViewer    viewer;
  PetscInt       i,j,nidx;
  const PetscInt *idx;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
  ierr = PetscOptionsGetString(prefix,"-pc_gasm_print_subdomains",fname,PETSC_MAX_PATH_LEN,PETSC_NULL);CHKERRQ(ierr);
  if (fname[0] == 0) { ierr = PetscStrcpy(fname,"stdout");CHKERRQ(ierr); }
  ierr = PetscViewerASCIIOpen(((PetscObject)pc)->comm,fname,&viewer);CHKERRQ(ierr);
  for (i=0; i<osm->n; ++i) {
    ierr = ISGetLocalSize(osm->is[i],&nidx);CHKERRQ(ierr);
    ierr = ISGetIndices(osm->is[i],&idx);CHKERRQ(ierr);
    for (j=0; j<nidx; j++) {
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"%D ",idx[j]);CHKERRQ(ierr);
    }
    ierr = ISRestoreIndices(osm->is[i],&idx);CHKERRQ(ierr);
    ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
    if (osm->is_local) {
      ierr = ISGetLocalSize(osm->is_local[i],&nidx);CHKERRQ(ierr);
      ierr = ISGetIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
      for (j=0; j<nidx; j++) {
        ierr = PetscViewerASCIISynchronizedPrintf(viewer,"%D ",idx[j]);CHKERRQ(ierr);
      }
      ierr = ISRestoreIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
    }
  }
  ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
  ierr = PetscViewerDestroy(viewer);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetUp_GASM"
static PetscErrorCode PCSetUp_GASM(PC pc)
{
  PC_GASM        *osm = (PC_GASM*)pc->data;
  PetscErrorCode ierr;
  PetscBool      symset,flg;
  PetscInt       i,firstRow,lastRow;
  PetscMPIInt    size;
  MatReuse       scall = MAT_REUSE_MATRIX;
  KSP            ksp;
  PC             subpc;
  const char     *prefix,*pprefix;
  PetscInt       dn;       /* Number of indices in a single subdomain assigned to this processor. */
  const PetscInt *didx;    /* Indices from a single subdomain assigned to this processor. */
  PetscInt       ddn;      /* Number of indices in all subdomains assigned to this processor. */
  PetscInt       *ddidx;   /* Indices of all subdomains assigned to this processor. */
  IS             gid;      /* Identity IS of the size of all subdomains assigned to this processor. */
  Vec            x,y;
  PetscScalar    *gxarray,*gyarray;
  IS             *is;
  PetscFunctionBegin;
  if (!pc->setupcalled) {

    if (!osm->type_set) {
      ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
      if (symset && flg) { osm->type = PC_GASM_BASIC; }
    }

    if (osm->N == PETSC_DECIDE && osm->n < 1) {
      /* no subdomains given, use one per processor */
      osm->nmax = osm->n = 1;
      ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
      osm->N = size;
    } else if (osm->N == PETSC_DECIDE) {
      PetscInt inwork[2], outwork[2];
      /* determine global number of subdomains and the max number of local subdomains */
      inwork[0] = inwork[1] = osm->n;
      ierr = MPI_Allreduce(inwork,outwork,1,MPIU_2INT,PetscMaxSum_Op,((PetscObject)pc)->comm);CHKERRQ(ierr);
      osm->nmax = outwork[0];
      osm->N    = outwork[1];
    }
    if (!osm->is) { /* create the index sets */
      ierr = PCGASMCreateSubdomains(pc->pmat,osm->n,&osm->is);CHKERRQ(ierr);
    }
    if (!osm->is_local) {
      /*
         This indicates that osm->is should define a nonoverlapping decomposition
         (there is no way to really guarantee that if subdomains are set by the user through PCGASMSetLocalSubdomains,
          but the assumption is that either the user does the right thing, or subdomains in osm->is have been created
          via PCGASMCreateSubdomains, which guarantees a nonoverlapping decomposition).
         Therefore, osm->is will be used to define osm->is_local.
         If a nonzero overlap has been requested by the user, then osm->is will be expanded and will overlap,
         so osm->is_local should obtain a copy of osm->is while they are still (presumably) nonoverlapping.
         Otherwise (no overlap has been requested), osm->is_local are simply aliases for osm->is.
      */
      ierr = PetscMalloc(osm->n*sizeof(IS),&osm->is_local);CHKERRQ(ierr);
      for (i=0; i<osm->n; i++) {
        if (osm->overlap > 0) { /* with positive overlap, osm->is[i] will be modified */
          ierr = ISDuplicate(osm->is[i],&osm->is_local[i]);CHKERRQ(ierr);
          ierr = ISCopy(osm->is[i],osm->is_local[i]);CHKERRQ(ierr);
        } else {
          ierr = PetscObjectReference((PetscObject)osm->is[i]);CHKERRQ(ierr);
          osm->is_local[i] = osm->is[i];
        }
      }
    }
    /* Beyond this point osm->is_local is not null. */
    if (osm->overlap > 0) {
      /* Extend the "overlapping" regions by a number of steps */
      ierr = MatIncreaseOverlap(pc->pmat,osm->n,osm->is,osm->overlap);CHKERRQ(ierr);
    }
    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    flg  = PETSC_FALSE;
    ierr = PetscOptionsGetBool(prefix,"-pc_gasm_print_subdomains",&flg,PETSC_NULL);CHKERRQ(ierr);
    if (flg) { ierr = PCGASMPrintSubdomains(pc);CHKERRQ(ierr); }

    if (osm->sort_indices) {
      for (i=0; i<osm->n; i++) {
        ierr = ISSort(osm->is[i]);CHKERRQ(ierr);
        ierr = ISSort(osm->is_local[i]);CHKERRQ(ierr);
      }
    }
    /* Merge the ISs, create merged vectors and scatter contexts. */
    /* Restriction ISs. */
    ddn = 0;
    for (i=0; i<osm->n; i++) {
      ierr = ISGetLocalSize(osm->is[i],&dn);CHKERRQ(ierr);
      ddn += dn;
    }
    ierr = PetscMalloc(ddn*sizeof(PetscInt),&ddidx);CHKERRQ(ierr);
    ddn = 0;
    for (i=0; i<osm->n; i++) {
      ierr = ISGetLocalSize(osm->is[i],&dn);CHKERRQ(ierr);
      ierr = ISGetIndices(osm->is[i],&didx);CHKERRQ(ierr);
      ierr = PetscMemcpy(ddidx+ddn,didx,sizeof(PetscInt)*dn);CHKERRQ(ierr);
      ierr = ISRestoreIndices(osm->is[i],&didx);CHKERRQ(ierr);
      ddn += dn;
    }
    ierr = ISCreateGeneral(((PetscObject)pc)->comm,ddn,ddidx,PETSC_OWN_POINTER,&osm->gis);CHKERRQ(ierr);
    ierr = MatGetVecs(pc->pmat,&x,&y);CHKERRQ(ierr);
    ierr = VecCreateMPI(((PetscObject)pc)->comm,ddn,PETSC_DECIDE,&osm->gx);CHKERRQ(ierr);
    ierr = VecDuplicate(osm->gx,&osm->gy);CHKERRQ(ierr);
    ierr = ISCreateStride(((PetscObject)pc)->comm,ddn,0,1,&gid);CHKERRQ(ierr);
    ierr = VecScatterCreate(x,osm->gis,osm->gx,gid,&osm->grestriction);CHKERRQ(ierr);
    ierr = ISDestroy(gid);CHKERRQ(ierr);
    /* Prolongation ISs */
    { PetscInt       dn_local;       /* Number of indices in the local part of a single subdomain assigned to this processor. */
      const PetscInt *didx_local;    /* Global indices from the local part of a single subdomain assigned to this processor. */
      PetscInt       ddn_local;      /* Number of indices in the local part of the disjoint union of all subdomains assigned to this processor. */
      PetscInt       *ddidx_local;   /* Global indices of the local part of the disjoint union of all subdomains assigned to this processor. */

      ISLocalToGlobalMapping ltog;          /* Map from global to local indices on the disjoint union of subdomains: "local" ind's run from 0 to ddn-1. */
      PetscInt              *ddidx_llocal;  /* Mapped local indices of the disjoint union of local parts of subdomains. */
      PetscInt               ddn_llocal;    /* Number of indices in ddidx_llocal; must equal ddn_local, or else gis_local is not a sub-IS of gis. */
      IS                     gis_llocal;    /* IS with ddidx_llocal indices. */
      PetscInt               j;
      ddn_local = 0;
      for (i=0; i<osm->n; i++) {
        ierr = ISGetLocalSize(osm->is_local[i],&dn_local);CHKERRQ(ierr);
        ddn_local += dn_local;
      }
      ierr = PetscMalloc(ddn_local*sizeof(PetscInt),&ddidx_local);CHKERRQ(ierr);
      ddn_local = 0;
      for (i=0; i<osm->n; i++) {
        ierr = ISGetLocalSize(osm->is_local[i],&dn_local);CHKERRQ(ierr);
        ierr = ISGetIndices(osm->is_local[i],&didx_local);CHKERRQ(ierr);
        ierr = PetscMemcpy(ddidx_local+ddn_local,didx_local,sizeof(PetscInt)*dn_local);CHKERRQ(ierr);
        ierr = ISRestoreIndices(osm->is_local[i],&didx_local);CHKERRQ(ierr);
        ddn_local += dn_local;
      }
      ierr = PetscMalloc(sizeof(PetscInt)*ddn_local,&ddidx_llocal);CHKERRQ(ierr);
      ierr = ISCreateGeneral(((PetscObject)pc)->comm,ddn_local,ddidx_local,PETSC_OWN_POINTER,&osm->gis_local);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingCreateIS(osm->gis,&ltog);CHKERRQ(ierr);
      /* Map all ddn_local global indices of the local parts into the subdomain-union numbering. */
      ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,ddn_local,ddidx_local,&ddn_llocal,ddidx_llocal);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingDestroy(ltog);CHKERRQ(ierr);
      if (ddn_llocal != ddn_local) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"gis_local contains %D indices outside of gis",ddn_llocal - ddn_local);
      /* Now convert these localized indices into the global indices into the merged output vector. */
      ierr = VecGetOwnershipRange(osm->gy,&firstRow,&lastRow);CHKERRQ(ierr);
      for (j=0; j<ddn_llocal; ++j) {
        ddidx_llocal[j] += firstRow;
      }
      ierr = ISCreateGeneral(PETSC_COMM_SELF,ddn_llocal,ddidx_llocal,PETSC_OWN_POINTER,&gis_llocal);CHKERRQ(ierr);
      ierr = VecScatterCreate(y,osm->gis_local,osm->gy,gis_llocal,&osm->gprolongation);CHKERRQ(ierr);
      ierr = ISDestroy(gis_llocal);CHKERRQ(ierr);
    }
    /* Create the subdomain work vectors. */
    ierr = PetscMalloc(osm->n*sizeof(Vec),&osm->x);CHKERRQ(ierr);
    ierr = PetscMalloc(osm->n*sizeof(Vec),&osm->y);CHKERRQ(ierr);
    ierr = VecGetOwnershipRange(osm->gx,&firstRow,&lastRow);CHKERRQ(ierr);
    ierr = VecGetArray(osm->gx,&gxarray);CHKERRQ(ierr);
    ierr = VecGetArray(osm->gy,&gyarray);CHKERRQ(ierr);
    for (i=0, ddn=0; i<osm->n; ++i, ddn += dn) {
      ierr = ISGetLocalSize(osm->is[i],&dn);CHKERRQ(ierr);
      ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,dn,gxarray+ddn,&osm->x[i]);CHKERRQ(ierr);
      ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,dn,gyarray+ddn,&osm->y[i]);CHKERRQ(ierr);
    }
    ierr = VecRestoreArray(osm->gx,&gxarray);CHKERRQ(ierr);
    ierr = VecRestoreArray(osm->gy,&gyarray);CHKERRQ(ierr);
    ierr = VecDestroy(x);CHKERRQ(ierr);
    ierr = VecDestroy(y);CHKERRQ(ierr);
    /* Create the local solvers */
    ierr = PetscMalloc(osm->n*sizeof(KSP),&osm->ksp);CHKERRQ(ierr);
    for (i=0; i<osm->n; i++) { /* KSPs are local */
      ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
      ierr = PetscLogObjectParent(pc,ksp);CHKERRQ(ierr);
      ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
      ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
      ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
      ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
      ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
      ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
      osm->ksp[i] = ksp;
    }
    scall = MAT_INITIAL_MATRIX;

  } else {
    /*
       Destroy the blocks from the previous iteration
    */
    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      ierr = MatDestroyMatrices(osm->n,&osm->pmat);CHKERRQ(ierr);
      scall = MAT_INITIAL_MATRIX;
    }
  }

  /*
     Extract out the submatrices.
  */
  ierr = PetscMalloc(sizeof(IS)*osm->nmax,&is);CHKERRQ(ierr);
  for (i=0; i<osm->n; ++i) {
    is[i] = osm->is[i];
  }
  for (i=osm->n; i<osm->nmax; i++) {
    ierr = ISCreateStride(((PetscObject)pc)->comm,0,0,1,&is[i]);CHKERRQ(ierr);
  }
  ierr = MatGetSubMatrices(pc->pmat,osm->n,osm->is,is,scall,&osm->pmat);CHKERRQ(ierr);
  if (scall == MAT_INITIAL_MATRIX) {
    ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
    for (i=0; i<osm->n; i++) {
      ierr = PetscLogObjectParent(pc,osm->pmat[i]);CHKERRQ(ierr);
      ierr = PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);CHKERRQ(ierr);
    }
  }

  /* Return control to the user so that the submatrices can be modified (e.g., to apply
     different boundary conditions for the submatrices than for the global problem) */
  ierr = PCModifySubMatrices(pc,osm->n,is,is,osm->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);
  for (i=osm->n; i<osm->nmax; i++) {
    ierr = ISDestroy(is[i]);CHKERRQ(ierr);
  }
  ierr = PetscFree(is);CHKERRQ(ierr);

  /*
     Loop over submatrices putting them into local ksp
  */
  for (i=0; i<osm->n; i++) {
    ierr = KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i],pc->flag);CHKERRQ(ierr);
    if (!pc->setupcalled) {
      ierr = KSPSetFromOptions(osm->ksp[i]);CHKERRQ(ierr);
    }
  }

  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetUpOnBlocks_GASM"
static PetscErrorCode PCSetUpOnBlocks_GASM(PC pc)
{
  PC_GASM        *osm = (PC_GASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  for (i=0; i<osm->n; i++) {
    ierr = KSPSetUp(osm->ksp[i]);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApply_GASM"
static PetscErrorCode PCApply_GASM(PC pc,Vec x,Vec y)
{
  PC_GASM        *osm = (PC_GASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Apply y = sum_i R_i^T A_i^{-1} R_i x, with support for limiting the
     restriction or interpolation to only local subdomain values (leaving
     the other values 0).
  */
  if (!(osm->type & PC_GASM_RESTRICT)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    ierr = VecZeroEntries(osm->gx);CHKERRQ(ierr);
  }
  if (!(osm->type & PC_GASM_INTERPOLATE)) {
    reverse = SCATTER_REVERSE_LOCAL;
  }

  ierr = VecScatterBegin(osm->grestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
  ierr = VecZeroEntries(y);CHKERRQ(ierr);
  ierr = VecScatterEnd(osm->grestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
  /* do the local solves */
  for (i=0; i<osm->n; ++i) { /* Note that the solves are local, so we can go to osm->n, rather than osm->nmax. */
    ierr = KSPSolve(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
  }
  ierr = VecScatterBegin(osm->gprolongation,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
  ierr = VecScatterEnd(osm->gprolongation,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApplyTranspose_GASM"
static PetscErrorCode PCApplyTranspose_GASM(PC pc,Vec x,Vec y)
{
  PC_GASM        *osm = (PC_GASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).

     Note: these are reversed from those in PCApply_GASM() because we are
     applying the transpose of the three terms.
  */
  if (!(osm->type & PC_GASM_INTERPOLATE)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    ierr = VecZeroEntries(osm->gx);CHKERRQ(ierr);
  }
  if (!(osm->type & PC_GASM_RESTRICT)) {
    reverse = SCATTER_REVERSE_LOCAL;
  }

  ierr = VecScatterBegin(osm->grestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
  ierr = VecZeroEntries(y);CHKERRQ(ierr);
  ierr = VecScatterEnd(osm->grestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
  /* do the local solves */
  for (i=0; i<osm->n; ++i) { /* Note that the solves are local, so we can go to osm->n, rather than osm->nmax. */
    ierr = KSPSolveTranspose(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
  }
  ierr = VecScatterBegin(osm->gprolongation,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
  ierr = VecScatterEnd(osm->gprolongation,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCDestroy_GASM"
static PetscErrorCode PCDestroy_GASM(PC pc)
{
  PC_GASM        *osm = (PC_GASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  if (osm->ksp) {
    for (i=0; i<osm->n; i++) {
      ierr = KSPDestroy(osm->ksp[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree(osm->ksp);CHKERRQ(ierr);
  }
  if (osm->pmat) {
    if (osm->n > 0) {
      ierr = MatDestroyMatrices(osm->n,&osm->pmat);CHKERRQ(ierr);
    }
  }
  for (i=0; i<osm->n; i++) {
    ierr = VecDestroy(osm->x[i]);CHKERRQ(ierr);
    ierr = VecDestroy(osm->y[i]);CHKERRQ(ierr);
  }
  ierr = PetscFree(osm->x);CHKERRQ(ierr);
  ierr = PetscFree(osm->y);CHKERRQ(ierr);
  ierr = VecDestroy(osm->gx);CHKERRQ(ierr);
  ierr = VecDestroy(osm->gy);CHKERRQ(ierr);

  ierr = VecScatterDestroy(osm->grestriction);CHKERRQ(ierr);
  ierr = VecScatterDestroy(osm->gprolongation);CHKERRQ(ierr);

  if (osm->is) {
    ierr = PCGASMDestroySubdomains(osm->n,osm->is,osm->is_local);CHKERRQ(ierr);
  }
  ierr = PetscFree(osm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetFromOptions_GASM"
static PetscErrorCode PCSetFromOptions_GASM(PC pc)
{
  PC_GASM        *osm = (PC_GASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       blocks,ovl;
  PetscBool      symset,flg;
  PCGASMType     gasmtype;

  PetscFunctionBegin;
  /* set the type to symmetric if matrix is symmetric */
  if (!osm->type_set && pc->pmat) {
    ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
    if (symset && flg) { osm->type = PC_GASM_BASIC; }
  }
  ierr = PetscOptionsHead("Generalized additive Schwarz options");CHKERRQ(ierr);
    ierr = PetscOptionsInt("-pc_gasm_blocks","Number of subdomains","PCGASMSetTotalSubdomains",osm->n,&blocks,&flg);CHKERRQ(ierr);
    if (flg) {ierr = PCGASMSetTotalSubdomains(pc,blocks);CHKERRQ(ierr);}
    ierr = PetscOptionsInt("-pc_gasm_overlap","Number of grid points overlap","PCGASMSetOverlap",osm->overlap,&ovl,&flg);CHKERRQ(ierr);
    if (flg) {ierr = PCGASMSetOverlap(pc,ovl);CHKERRQ(ierr);}
    flg  = PETSC_FALSE;
    ierr = PetscOptionsEnum("-pc_gasm_type","Type of restriction/extension","PCGASMSetType",PCGASMTypes,(PetscEnum)osm->type,(PetscEnum*)&gasmtype,&flg);CHKERRQ(ierr);
    if (flg) {ierr = PCGASMSetType(pc,gasmtype);CHKERRQ(ierr);}
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*------------------------------------------------------------------------------------*/

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCGASMSetLocalSubdomains_GASM"
PetscErrorCode PETSCKSP_DLLEXPORT PCGASMSetLocalSubdomains_GASM(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PC_GASM        *osm = (PC_GASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 1 or more blocks, n = %D",n);
  if (pc->setupcalled && (n != osm->n || is)) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"PCGASMSetLocalSubdomains() should be called before calling PCSetUp().");

  if (!pc->setupcalled) {
    /* reference the new index sets, then release any previously set ones */
    if (is) {
      for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is[i]);CHKERRQ(ierr);}
    }
    if (is_local) {
      for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is_local[i]);CHKERRQ(ierr);}
    }
    if (osm->is) {
      ierr = PCGASMDestroySubdomains(osm->n,osm->is,osm->is_local);CHKERRQ(ierr);
    }
    osm->n        = n;
    osm->is       = 0;
    osm->is_local = 0;
    if (is) {
      ierr = PetscMalloc(n*sizeof(IS),&osm->is);CHKERRQ(ierr);
      for (i=0; i<n; i++) { osm->is[i] = is[i]; }
      /* Flag indicating that the user has set overlapping subdomains so PCGASM should not increase their size. */
      osm->overlap = -1;
    }
    if (is_local) {
      ierr = PetscMalloc(n*sizeof(IS),&osm->is_local);CHKERRQ(ierr);
      for (i=0; i<n; i++) { osm->is_local[i] = is_local[i]; }
    }
  }
  PetscFunctionReturn(0);
}
EXTERN_C_END

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCGASMSetTotalSubdomains_GASM"
PetscErrorCode PETSCKSP_DLLEXPORT PCGASMSetTotalSubdomains_GASM(PC pc,PetscInt N)
{
  PC_GASM        *osm = (PC_GASM*)pc->data;
  PetscErrorCode ierr;
  PetscMPIInt    rank,size;
  PetscInt       n;

  PetscFunctionBegin;
  if (N < 1) SETERRQ1(((PetscObject)pc)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Number of total blocks must be > 0, N = %D",N);

  /*
     Split the subdomains equally among all processors
  */
  ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
  n = N/size + ((N % size) > rank);
  if (!n) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Process %d must have at least one block: total processors %d total blocks %D",(int)rank,(int)size,N);
  if (pc->setupcalled && n != osm->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"PCGASMSetTotalSubdomains() should be called before PCSetUp().");
  if (!pc->setupcalled) {
    if (osm->is) {
      ierr = PCGASMDestroySubdomains(osm->n,osm->is,osm->is_local);CHKERRQ(ierr);
    }
    osm->N        = N;
    osm->n        = n;
    osm->is       = 0;
    osm->is_local = 0;
  }
  PetscFunctionReturn(0);
}/* PCGASMSetTotalSubdomains_GASM() */
EXTERN_C_END

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCGASMSetOverlap_GASM"
PetscErrorCode PETSCKSP_DLLEXPORT PCGASMSetOverlap_GASM(PC pc,PetscInt ovl)
{
  PC_GASM *osm = (PC_GASM*)pc->data;

  PetscFunctionBegin;
  if (ovl < 0) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
  if (pc->setupcalled && ovl != osm->overlap) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"PCGASMSetOverlap() should be called before PCSetUp().");
  if (!pc->setupcalled) {
    osm->overlap = ovl;
  }
  PetscFunctionReturn(0);
}
EXTERN_C_END

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCGASMSetType_GASM"
PetscErrorCode PETSCKSP_DLLEXPORT PCGASMSetType_GASM(PC pc,PCGASMType type)
{
  PC_GASM *osm = (PC_GASM*)pc->data;

  PetscFunctionBegin;
  osm->type     = type;
  osm->type_set = PETSC_TRUE;
  PetscFunctionReturn(0);
}
EXTERN_C_END

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCGASMSetSortIndices_GASM"
PetscErrorCode PETSCKSP_DLLEXPORT PCGASMSetSortIndices_GASM(PC pc,PetscBool doSort)
{
  PC_GASM *osm = (PC_GASM*)pc->data;

  PetscFunctionBegin;
  osm->sort_indices = doSort;
  PetscFunctionReturn(0);
}
EXTERN_C_END

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCGASMGetSubKSP_GASM"
/*
   FIX: This routine might need to be modified once multiple ranks per subdomain are allowed.
        In particular, it would upset the global subdomain number calculation.
*/
PetscErrorCode PETSCKSP_DLLEXPORT PCGASMGetSubKSP_GASM(PC pc,PetscInt *n,PetscInt *first,KSP **ksp)
{
  PC_GASM        *osm = (PC_GASM*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (osm->n < 1) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ORDER,"Need to call PCSetUp() on PC (or KSPSetUp() on the outer KSP object) before calling here");

  if (n) {
    *n = osm->n;
  }
  if (first) {
    ierr = MPI_Scan(&osm->n,first,1,MPIU_INT,MPI_SUM,((PetscObject)pc)->comm);CHKERRQ(ierr);
    *first -= osm->n;
  }
  if (ksp) {
    /* Assume that the local solves are now different; not necessarily
       true, though!  This flag is used only for PCView_GASM(). */
    *ksp                   = osm->ksp;
    osm->same_local_solves = PETSC_FALSE;
  }
  PetscFunctionReturn(0);
}/* PCGASMGetSubKSP_GASM() */
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCGASMSetLocalSubdomains"
/*@C
    PCGASMSetLocalSubdomains - Sets the local subdomains (for this processor
    only) for the additive Schwarz preconditioner.

    Collective on PC

    Input Parameters:
+   pc - the preconditioner context
.   n - the number of subdomains for this processor (default value = 1)
.   is - the index sets that define the subdomains for this processor
         (or PETSC_NULL for PETSc to determine subdomains)
-   is_local - the index sets that define the local part of the subdomains for this processor
         (or PETSC_NULL to use the default of 1 subdomain per process)

    Notes:
    The IS numbering is in the parallel, global numbering of the vector.

    By default the GASM preconditioner uses 1 block per processor.

    Use PCGASMSetTotalSubdomains() to set the subdomains for all processors.

    Level: advanced

.keywords: PC, GASM, set, local, subdomains, additive Schwarz

.seealso: PCGASMSetTotalSubdomains(), PCGASMSetOverlap(), PCGASMGetSubKSP(),
          PCGASMCreateSubdomains2D(), PCGASMGetLocalSubdomains()
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCGASMSetLocalSubdomains(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGASMSetLocalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,n,is,is_local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

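/*
   Usage sketch for PCGASMSetLocalSubdomains() (illustrative, not from the PETSc
   examples; the Mat A and outer KSP ksp are assumed to exist): build one
   subdomain per process from the locally owned row range and register it.

     PC       pc;
     IS       is[1];
     PetscInt rstart,rend;

     ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
     ierr = PCSetType(pc,PCGASM);CHKERRQ(ierr);
     ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
     ierr = ISCreateStride(PETSC_COMM_SELF,rend-rstart,rstart,1,&is[0]);CHKERRQ(ierr);
     ierr = PCGASMSetLocalSubdomains(pc,1,is,PETSC_NULL);CHKERRQ(ierr);

   Passing PETSC_NULL for is_local lets PCSetUp_GASM() derive the
   nonoverlapping parts from is[], as described in the Notes above.
*/
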
#undef __FUNCT__
#define __FUNCT__ "PCGASMSetTotalSubdomains"
/*@C
    PCGASMSetTotalSubdomains - Sets the total number of subdomains for all
    processors for the additive Schwarz preconditioner.  Either all or no
    processors in the PC communicator must call this routine, with the same N.

    Collective on PC

    Input Parameters:
+   pc - the preconditioner context
-   N - the total number of subdomains for all processors

    Options Database Key:
    To set the total number of subdomain blocks rather than specify the
    index sets, use the option
.    -pc_gasm_blocks <blks> - Sets total blocks

    Notes:
    The subdomains are split as evenly as possible among the processors; to
    set the actual subdomain index sets, use PCGASMSetLocalSubdomains().

    By default the GASM preconditioner uses 1 block per processor.

    Level: advanced

.keywords: PC, GASM, set, total, global, subdomains, additive Schwarz

.seealso: PCGASMSetLocalSubdomains(), PCGASMSetOverlap(), PCGASMGetSubKSP(),
          PCGASMCreateSubdomains2D()
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCGASMSetTotalSubdomains(PC pc,PetscInt N)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGASMSetTotalSubdomains_C",(PC,PetscInt),(pc,N));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

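/*
   Usage sketch (illustrative): ask for 16 subdomains in total; they are
   split as evenly as possible among the processes in the PC's communicator.

     ierr = PCSetType(pc,PCGASM);CHKERRQ(ierr);
     ierr = PCGASMSetTotalSubdomains(pc,16);CHKERRQ(ierr);

   Equivalently, from the command line: -pc_type gasm -pc_gasm_blocks 16
*/
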
#undef __FUNCT__
#define __FUNCT__ "PCGASMSetOverlap"
/*@
    PCGASMSetOverlap - Sets the overlap between a pair of subdomains for the
    additive Schwarz preconditioner.  Either all or no processors in the
    PC communicator must call this routine.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)

    Options Database Key:
.   -pc_gasm_overlap <ovl> - Sets overlap

    Notes:
    By default the GASM preconditioner uses 1 block per processor.  To use
    multiple blocks per processor, see PCGASMSetTotalSubdomains() and
    PCGASMSetLocalSubdomains() (and the option -pc_gasm_blocks <blks>).

    The overlap defaults to 1, so if one desires that no additional
    overlap be computed beyond what may have been set with a call to
    PCGASMSetTotalSubdomains() or PCGASMSetLocalSubdomains(), then ovl
    must be set to 0.  In particular, if one does not explicitly set
    the subdomains in an application code, then all overlap would be computed
    internally by PETSc, and using an overlap of 0 would result in a GASM
    variant that is equivalent to the block Jacobi preconditioner.

    Note that one can define initial index sets with any overlap via
    PCGASMSetTotalSubdomains() or PCGASMSetLocalSubdomains(); the routine
    PCGASMSetOverlap() merely allows PETSc to extend that overlap further
    if desired.

    Level: intermediate

.keywords: PC, GASM, set, overlap

.seealso: PCGASMSetTotalSubdomains(), PCGASMSetLocalSubdomains(), PCGASMGetSubKSP(),
          PCGASMCreateSubdomains2D(), PCGASMGetLocalSubdomains()
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCGASMSetOverlap(PC pc,PetscInt ovl)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveInt(pc,ovl,2);
  ierr = PetscTryMethod(pc,"PCGASMSetOverlap_C",(PC,PetscInt),(pc,ovl));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

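/*
   Usage sketch (illustrative): request two layers of overlap; PCSetUp_GASM()
   then calls MatIncreaseOverlap() to grow each subdomain by ovl layers of
   matrix connectivity.

     ierr = PCGASMSetOverlap(pc,2);CHKERRQ(ierr);

   With PETSc-generated subdomains and an overlap of 0, GASM reduces to the
   block Jacobi preconditioner, as noted above.
*/
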
#undef __FUNCT__
#define __FUNCT__ "PCGASMSetType"
/*@
    PCGASMSetType - Sets the type of restriction and interpolation used
    for local problems in the additive Schwarz method.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   type - variant of GASM, one of
.vb
      PC_GASM_BASIC       - full interpolation and restriction
      PC_GASM_RESTRICT    - full restriction, local processor interpolation
      PC_GASM_INTERPOLATE - full interpolation, local processor restriction
      PC_GASM_NONE        - local processor restriction and interpolation
.ve

    Options Database Key:
.   -pc_gasm_type [basic,restrict,interpolate,none] - Sets GASM type

    Level: intermediate

.keywords: PC, GASM, set, type

.seealso: PCGASMSetTotalSubdomains(), PCGASMSetLocalSubdomains(), PCGASMGetSubKSP(),
          PCGASMCreateSubdomains2D()
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCGASMSetType(PC pc,PCGASMType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveEnum(pc,type,2);
  ierr = PetscTryMethod(pc,"PCGASMSetType_C",(PC,PCGASMType),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

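/*
   Usage sketch (illustrative): select the restricted variant explicitly
   (it is also the default; see PCCreate_GASM() below).

     ierr = PCGASMSetType(pc,PC_GASM_RESTRICT);CHKERRQ(ierr);
*/
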
#undef __FUNCT__
#define __FUNCT__ "PCGASMSetSortIndices"
/*@
    PCGASMSetSortIndices - Determines whether subdomain indices are sorted.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   doSort - sort the subdomain indices

    Level: intermediate

.keywords: PC, GASM, set, sort, indices

.seealso: PCGASMSetLocalSubdomains(), PCGASMSetTotalSubdomains(), PCGASMGetSubKSP(),
          PCGASMCreateSubdomains2D()
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCGASMSetSortIndices(PC pc,PetscBool doSort)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveBool(pc,doSort,2);
  ierr = PetscTryMethod(pc,"PCGASMSetSortIndices_C",(PC,PetscBool),(pc,doSort));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGASMGetSubKSP"
/*@C
   PCGASMGetSubKSP - Gets the local KSP contexts for all blocks on
   this processor.

   Collective on PC iff first_local is requested

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  n_local - the number of blocks on this processor, or PETSC_NULL
.  first_local - the global number of the first block on this processor, or PETSC_NULL;
                 all processors must request it or all must pass PETSC_NULL
-  ksp - the array of KSP contexts

   Notes:
   After PCGASMGetSubKSP() the array of KSP contexts is not to be freed.

   Currently for some matrix implementations only 1 block per processor
   is supported.

   You must call KSPSetUp() before calling PCGASMGetSubKSP().

   Level: advanced

.keywords: PC, GASM, additive Schwarz, get, sub, KSP, context

.seealso: PCGASMSetTotalSubdomains(), PCGASMSetLocalSubdomains(), PCGASMSetOverlap(),
          PCGASMCreateSubdomains2D()
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCGASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscUseMethod(pc,"PCGASMGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
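
/*
   Usage sketch for PCGASMGetSubKSP() (illustrative; the outer KSP ksp and PC pc
   are assumed to exist): after KSPSetUp(), fetch the local solvers and
   configure each block's PC directly instead of through the options database.

     KSP      *subksp;
     PC        subpc;
     PetscInt  i,nlocal,first;

     ierr = KSPSetUp(ksp);CHKERRQ(ierr);
     ierr = PCGASMGetSubKSP(pc,&nlocal,&first,&subksp);CHKERRQ(ierr);
     for (i=0; i<nlocal; i++) {
       ierr = KSPGetPC(subksp[i],&subpc);CHKERRQ(ierr);
       ierr = PCSetType(subpc,PCILU);CHKERRQ(ierr);
     }
*/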

/* -------------------------------------------------------------------------------------*/
/*MC
   PCGASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
           its own KSP object.

   Options Database Keys:
+  -pc_gasm_truelocal - Activates PCGASMSetUseTrueLocal()
.  -pc_gasm_blocks <blks> - Sets total blocks
.  -pc_gasm_overlap <ovl> - Sets overlap
-  -pc_gasm_type [basic,restrict,interpolate,none] - Sets GASM type

     IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
      will get a different convergence rate due to the default option of -pc_gasm_type restrict. Use
      -pc_gasm_type basic to get the standard GASM.

   Notes: Each processor can have one or more blocks, but a block cannot be shared by more
     than one processor. Defaults to one block per processor.

     To set options on the solvers for each block, append -sub_ to all the KSP and PC
        options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly

     To set the options on the solvers separately for each block, call PCGASMGetSubKSP()
         and set the options directly on the resulting KSP object (you can access its PC
         with KSPGetPC())

   Level: beginner

   Concepts: additive Schwarz method

    References:
    An additive variant of the Schwarz alternating method for the case of many subregions
    M Dryja, OB Widlund - Courant Institute, New York University Technical report

    Domain Decompositions: Parallel Multilevel Methods for Elliptic Partial Differential Equations,
    Barry Smith, Petter Bjorstad, and William Gropp, Cambridge University Press, ISBN 0-521-49589-X.

.seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
           PCBJACOBI, PCGASMSetUseTrueLocal(), PCGASMGetSubKSP(), PCGASMSetLocalSubdomains(),
           PCGASMSetTotalSubdomains(), PCSetModifySubmatrices(), PCGASMSetOverlap(), PCGASMSetType()

M*/
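
/*
   A minimal end-to-end sketch (illustrative; the Mat A and Vecs b, x are
   assumed to exist and be assembled):

     KSP ksp;
     PC  pc;

     ierr = KSPCreate(PETSC_COMM_WORLD,&ksp);CHKERRQ(ierr);
     ierr = KSPSetOperators(ksp,A,A,DIFFERENT_NONZERO_PATTERN);CHKERRQ(ierr);
     ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
     ierr = PCSetType(pc,PCGASM);CHKERRQ(ierr);
     ierr = KSPSetFromOptions(ksp);CHKERRQ(ierr);
     ierr = KSPSolve(ksp,b,x);CHKERRQ(ierr);

   Run, e.g., with: -pc_gasm_overlap 1 -sub_pc_type ilu -sub_ksp_type preonly
*/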

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCCreate_GASM"
PetscErrorCode PETSCKSP_DLLEXPORT PCCreate_GASM(PC pc)
{
  PetscErrorCode ierr;
  PC_GASM        *osm;

  PetscFunctionBegin;
  ierr = PetscNewLog(pc,PC_GASM,&osm);CHKERRQ(ierr);
  osm->N                 = PETSC_DECIDE;
  osm->n                 = 0;
  osm->nmax              = 0;
  osm->overlap           = 1;
  osm->ksp               = 0;
  osm->grestriction      = 0;
  osm->gprolongation     = 0;
  osm->gx                = 0;
  osm->gy                = 0;
  osm->x                 = 0;
  osm->y                 = 0;
  osm->is                = 0;
  osm->is_local          = 0;
  osm->pmat              = 0;
  osm->type              = PC_GASM_RESTRICT;
  osm->same_local_solves = PETSC_TRUE;
  osm->sort_indices      = PETSC_TRUE;

  pc->data                 = (void*)osm;
  pc->ops->apply           = PCApply_GASM;
  pc->ops->applytranspose  = PCApplyTranspose_GASM;
  pc->ops->setup           = PCSetUp_GASM;
  pc->ops->destroy         = PCDestroy_GASM;
  pc->ops->setfromoptions  = PCSetFromOptions_GASM;
  pc->ops->setuponblocks   = PCSetUpOnBlocks_GASM;
  pc->ops->view            = PCView_GASM;
  pc->ops->applyrichardson = 0;

  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMSetLocalSubdomains_C","PCGASMSetLocalSubdomains_GASM",
                    PCGASMSetLocalSubdomains_GASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMSetTotalSubdomains_C","PCGASMSetTotalSubdomains_GASM",
                    PCGASMSetTotalSubdomains_GASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMSetOverlap_C","PCGASMSetOverlap_GASM",
                    PCGASMSetOverlap_GASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMSetType_C","PCGASMSetType_GASM",
                    PCGASMSetType_GASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMSetSortIndices_C","PCGASMSetSortIndices_GASM",
                    PCGASMSetSortIndices_GASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMGetSubKSP_C","PCGASMGetSubKSP_GASM",
                    PCGASMGetSubKSP_GASM);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCGASMCreateSubdomains"
/*@C
   PCGASMCreateSubdomains - Creates the index sets for the overlapping Schwarz
   preconditioner for any problem on a general grid.

   Collective

   Input Parameters:
+  A - The global matrix operator
-  n - the number of local blocks

   Output Parameters:
.  outis - the array of index sets defining the subdomains

   Level: advanced

   Note: this generates nonoverlapping subdomains; the PCGASM will generate the overlap
    from these if you use PCGASMSetLocalSubdomains()

    In the Fortran version you must provide the array outis[] already allocated of length n.

.keywords: PC, GASM, additive Schwarz, create, subdomains, unstructured grid

.seealso: PCGASMSetLocalSubdomains(), PCGASMDestroySubdomains()
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCGASMCreateSubdomains(Mat A, PetscInt n, IS* outis[])
{
  MatPartitioning           mpart;
  const char                *prefix;
  PetscErrorCode            (*f)(Mat,PetscBool *,MatReuse,Mat*);
  PetscMPIInt               size;
  PetscInt                  i,j,rstart,rend,bs;
  PetscBool                 iscopy = PETSC_FALSE,isbaij = PETSC_FALSE,foundpart = PETSC_FALSE;
  Mat                       Ad = PETSC_NULL, adj;
  IS                        ispart,isnumb,*is;
  PetscErrorCode            ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidPointer(outis,3);
  if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of local blocks must be > 0, n = %D",n);

  /* Get prefix, row distribution, and block size */
  ierr = MatGetOptionsPrefix(A,&prefix);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
  ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
  if (rstart/bs*bs != rstart || rend/bs*bs != rend) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"bad row distribution [%D,%D) for matrix block size %D",rstart,rend,bs);

  /* Get diagonal block from matrix if possible */
  ierr = MPI_Comm_size(((PetscObject)A)->comm,&size);CHKERRQ(ierr);
  ierr = PetscObjectQueryFunction((PetscObject)A,"MatGetDiagonalBlock_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(A,&iscopy,MAT_INITIAL_MATRIX,&Ad);CHKERRQ(ierr);
  } else if (size == 1) {
    iscopy = PETSC_FALSE; Ad = A;
  } else {
    iscopy = PETSC_FALSE; Ad = PETSC_NULL;
  }
  if (Ad) {
    ierr = PetscTypeCompare((PetscObject)Ad,MATSEQBAIJ,&isbaij);CHKERRQ(ierr);
    if (!isbaij) {ierr = PetscTypeCompare((PetscObject)Ad,MATSEQSBAIJ,&isbaij);CHKERRQ(ierr);}
  }
  if (Ad && n > 1) {
    PetscBool  match,done;
    /* Try to setup a good matrix partitioning if available */
    ierr = MatPartitioningCreate(PETSC_COMM_SELF,&mpart);CHKERRQ(ierr);
    ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
    ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr);
    ierr = PetscTypeCompare((PetscObject)mpart,MATPARTITIONINGCURRENT,&match);CHKERRQ(ierr);
    if (!match) {
      ierr = PetscTypeCompare((PetscObject)mpart,MATPARTITIONINGSQUARE,&match);CHKERRQ(ierr);
    }
    if (!match) { /* assume a "good" partitioner is available */
      PetscInt na,*ia,*ja;
      ierr = MatGetRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
      if (done) {
        /* Build adjacency matrix by hand. Unfortunately a call to
           MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
           remove the block-aij structure and we cannot expect
           MatPartitioning to split vertices as we need */
        PetscInt i,j,*row,len,nnz,cnt,*iia=0,*jja=0;
        nnz = 0;
        for (i=0; i<na; i++) { /* count number of nonzeros */
          len = ia[i+1] - ia[i];
          row = ja + ia[i];
          for (j=0; j<len; j++) {
            if (row[j] == i) { /* don't count diagonal */
              len--; break;
            }
          }
          nnz += len;
        }
        ierr = PetscMalloc((na+1)*sizeof(PetscInt),&iia);CHKERRQ(ierr);
        ierr = PetscMalloc((nnz)*sizeof(PetscInt),&jja);CHKERRQ(ierr);
        nnz    = 0;
        iia[0] = 0;
        for (i=0; i<na; i++) { /* fill adjacency */
          cnt = 0;
          len = ia[i+1] - ia[i];
          row = ja + ia[i];
          for (j=0; j<len; j++) {
            if (row[j] != i) { /* if not diagonal */
              jja[nnz+cnt++] = row[j];
            }
          }
          nnz += cnt;
          iia[i+1] = nnz;
        }
        /* Partitioning of the adjacency matrix */
        ierr = MatCreateMPIAdj(PETSC_COMM_SELF,na,na,iia,jja,PETSC_NULL,&adj);CHKERRQ(ierr);
        ierr = MatPartitioningSetAdjacency(mpart,adj);CHKERRQ(ierr);
        ierr = MatPartitioningSetNParts(mpart,n);CHKERRQ(ierr);
        ierr = MatPartitioningApply(mpart,&ispart);CHKERRQ(ierr);
        ierr = ISPartitioningToNumbering(ispart,&isnumb);CHKERRQ(ierr);
        ierr = MatDestroy(adj);CHKERRQ(ierr);
        foundpart = PETSC_TRUE;
      }
      ierr = MatRestoreRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
    }
    ierr = MatPartitioningDestroy(mpart);CHKERRQ(ierr);
  }
  if (iscopy) {ierr = MatDestroy(Ad);CHKERRQ(ierr);}

  ierr = PetscMalloc(n*sizeof(IS),&is);CHKERRQ(ierr);
  *outis = is;

  if (!foundpart) {

    /* Partitioning by contiguous chunks of rows */

    PetscInt mbs   = (rend-rstart)/bs;
    PetscInt start = rstart;
    for (i=0; i<n; i++) {
      PetscInt count = (mbs/n + ((mbs % n) > i)) * bs;
      ierr   = ISCreateStride(PETSC_COMM_SELF,count,start,1,&is[i]);CHKERRQ(ierr);
      start += count;
    }

  } else {

    /* Partitioning by adjacency of diagonal block */

    const PetscInt *numbering;
    PetscInt       *count,nidx,*indices,*newidx,start=0;
    /* Get node count in each partition */
    ierr = PetscMalloc(n*sizeof(PetscInt),&count);CHKERRQ(ierr);
    ierr = ISPartitioningCount(ispart,n,count);CHKERRQ(ierr);
    if (isbaij && bs > 1) { /* adjust for the block-aij case */
      for (i=0; i<n; i++) count[i] *= bs;
    }
    /* Build indices from node numbering */
    ierr = ISGetLocalSize(isnumb,&nidx);CHKERRQ(ierr);
    ierr = PetscMalloc(nidx*sizeof(PetscInt),&indices);CHKERRQ(ierr);
    for (i=0; i<nidx; i++) indices[i] = i; /* needs to be initialized */
    ierr = ISGetIndices(isnumb,&numbering);CHKERRQ(ierr);
    ierr = PetscSortIntWithPermutation(nidx,numbering,indices);CHKERRQ(ierr);
    ierr = ISRestoreIndices(isnumb,&numbering);CHKERRQ(ierr);
    if (isbaij && bs > 1) { /* adjust for the block-aij case */
      ierr = PetscMalloc(nidx*bs*sizeof(PetscInt),&newidx);CHKERRQ(ierr);
      for (i=0; i<nidx; i++)
        for (j=0; j<bs; j++)
          newidx[i*bs+j] = indices[i]*bs + j;
      ierr = PetscFree(indices);CHKERRQ(ierr);
      nidx   *= bs;
      indices = newidx;
    }
    /* Shift to get global indices */
    for (i=0; i<nidx; i++) indices[i] += rstart;

    /* Build the index sets for each block */
    for (i=0; i<n; i++) {
      ierr   = ISCreateGeneral(PETSC_COMM_SELF,count[i],&indices[start],PETSC_COPY_VALUES,&is[i]);CHKERRQ(ierr);
      ierr   = ISSort(is[i]);CHKERRQ(ierr);
      start += count[i];
    }

    ierr = PetscFree(count);CHKERRQ(ierr);
    ierr = PetscFree(indices);CHKERRQ(ierr);
    ierr = ISDestroy(isnumb);CHKERRQ(ierr);
    ierr = ISDestroy(ispart);CHKERRQ(ierr);

  }

  PetscFunctionReturn(0);
}
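
/*
   Usage sketch for PCGASMCreateSubdomains() (illustrative; the Mat A and PC pc
   are assumed to exist): create 4 nonoverlapping local subdomains, register
   them, and destroy the index sets once the solves are complete.

     IS *is;

     ierr = PCGASMCreateSubdomains(A,4,&is);CHKERRQ(ierr);
     ierr = PCGASMSetLocalSubdomains(pc,4,is,PETSC_NULL);CHKERRQ(ierr);
     ... solve with the outer KSP ...
     ierr = PCGASMDestroySubdomains(4,is,PETSC_NULL);CHKERRQ(ierr);
*/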

#undef __FUNCT__
#define __FUNCT__ "PCGASMDestroySubdomains"
/*@C
   PCGASMDestroySubdomains - Destroys the index sets created with
   PCGASMCreateSubdomains(). Should be called after setting subdomains
   with PCGASMSetLocalSubdomains().

   Collective

   Input Parameters:
+  n - the number of index sets
.  is - the array of index sets
-  is_local - the array of local index sets, can be PETSC_NULL

   Level: advanced

.keywords: PC, GASM, additive Schwarz, create, subdomains, unstructured grid

.seealso: PCGASMCreateSubdomains(), PCGASMSetLocalSubdomains()
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCGASMDestroySubdomains(PetscInt n, IS is[], IS is_local[])
{
  PetscInt       i;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (n <= 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"n must be > 0: n = %D",n);
  PetscValidPointer(is,2);
  for (i=0; i<n; i++) { ierr = ISDestroy(is[i]);CHKERRQ(ierr); }
  ierr = PetscFree(is);CHKERRQ(ierr);
  if (is_local) {
    PetscValidPointer(is_local,3);
    for (i=0; i<n; i++) { ierr = ISDestroy(is_local[i]);CHKERRQ(ierr); }
    ierr = PetscFree(is_local);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGASMCreateSubdomains2D"
/*@
   PCGASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
   preconditioner for a two-dimensional problem on a regular grid.

   Not Collective

   Input Parameters:
+  m, n - the number of mesh points in the x and y directions
.  M, N - the number of subdomains in the x and y directions
.  dof - degrees of freedom per node
-  overlap - overlap in mesh lines

   Output Parameters:
+  Nsub - the number of subdomains created
.  is - array of index sets defining overlapping (if overlap > 0) subdomains
-  is_local - array of index sets defining non-overlapping subdomains

   Note:
   Presently PCGASMCreateSubdomains2D() is valid only for sequential
   preconditioners.  More general related routines are
   PCGASMSetTotalSubdomains() and PCGASMSetLocalSubdomains().

   Level: advanced

.keywords: PC, GASM, additive Schwarz, create, subdomains, 2D, regular grid

.seealso: PCGASMSetTotalSubdomains(), PCGASMSetLocalSubdomains(), PCGASMGetSubKSP(),
          PCGASMSetOverlap()
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCGASMCreateSubdomains2D(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt dof,PetscInt overlap,PetscInt *Nsub,IS **is,IS **is_local)
{
  PetscInt       i,j,height,width,ystart,xstart,yleft,yright,xleft,xright,loc_outer;
  PetscErrorCode ierr;
  PetscInt       nidx,*idx,loc,ii,jj,count;

  PetscFunctionBegin;
  if (dof != 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"dof must be 1");

  *Nsub = N*M;
  ierr = PetscMalloc((*Nsub)*sizeof(IS),is);CHKERRQ(ierr);
  ierr = PetscMalloc((*Nsub)*sizeof(IS),is_local);CHKERRQ(ierr);
  ystart = 0;
  loc_outer = 0;
  for (i=0; i<N; i++) {
    height = n/N + ((n % N) > i); /* height of subdomain */
    if (height < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many N subdomains for mesh dimension n");
    yleft  = ystart - overlap; if (yleft < 0) yleft = 0;
    yright = ystart + height + overlap; if (yright > n) yright = n;
    xstart = 0;
    for (j=0; j<M; j++) {
      width = m/M + ((m % M) > j); /* width of subdomain */
      if (width < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many M subdomains for mesh dimension m");
      xleft  = xstart - overlap; if (xleft < 0) xleft = 0;
      xright = xstart + width + overlap; if (xright > m) xright = m;
      nidx   = (xright - xleft)*(yright - yleft);
      ierr = PetscMalloc(nidx*sizeof(PetscInt),&idx);CHKERRQ(ierr);
      loc    = 0;
      for (ii=yleft; ii<yright; ii++) {
        count = m*ii + xleft;
        for (jj=xleft; jj<xright; jj++) {
          idx[loc++] = count++;
        }
      }
      ierr = ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,PETSC_COPY_VALUES,(*is)+loc_outer);CHKERRQ(ierr);
      if (overlap == 0) {
        ierr = PetscObjectReference((PetscObject)(*is)[loc_outer]);CHKERRQ(ierr);
        (*is_local)[loc_outer] = (*is)[loc_outer];
      } else {
        for (loc=0,ii=ystart; ii<ystart+height; ii++) {
          for (jj=xstart; jj<xstart+width; jj++) {
            idx[loc++] = m*ii + jj;
          }
        }
        ierr = ISCreateGeneral(PETSC_COMM_SELF,loc,idx,PETSC_COPY_VALUES,*is_local+loc_outer);CHKERRQ(ierr);
      }
      ierr = PetscFree(idx);CHKERRQ(ierr);
      xstart += width;
      loc_outer++;
    }
    ystart += height;
  }
  for (i=0; i<*Nsub; i++) { ierr = ISSort((*is)[i]);CHKERRQ(ierr); }
  PetscFunctionReturn(0);
}
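
/*
   Usage sketch for PCGASMCreateSubdomains2D() (illustrative; valid only for
   sequential preconditioners, per the Note above; pc is assumed to exist):
   tile a 64x64 grid with 4x4 subdomains and one mesh line of overlap, then
   register the resulting index sets.

     PetscInt Nsub;
     IS       *is,*is_local;

     ierr = PCGASMCreateSubdomains2D(64,64,4,4,1,1,&Nsub,&is,&is_local);CHKERRQ(ierr);
     ierr = PCGASMSetLocalSubdomains(pc,Nsub,is,is_local);CHKERRQ(ierr);
*/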

#undef __FUNCT__
#define __FUNCT__ "PCGASMGetLocalSubdomains"
/*@C
    PCGASMGetLocalSubdomains - Gets the local subdomains (for this processor
    only) for the additive Schwarz preconditioner.

    Not Collective

    Input Parameter:
.   pc - the preconditioner context

    Output Parameters:
+   n - the number of subdomains for this processor (default value = 1)
.   is - the index sets that define the subdomains for this processor
-   is_local - the index sets that define the local part of the subdomains for this processor (can be PETSC_NULL)

    Notes:
    The IS numbering is in the parallel, global numbering of the vector.

    Level: advanced

.keywords: PC, GASM, get, local, subdomains, additive Schwarz

.seealso: PCGASMSetTotalSubdomains(), PCGASMSetOverlap(), PCGASMGetSubKSP(),
          PCGASMCreateSubdomains2D(), PCGASMSetLocalSubdomains(), PCGASMGetLocalSubmatrices()
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCGASMGetLocalSubdomains(PC pc,PetscInt *n,IS *is[],IS *is_local[])
{
  PC_GASM        *osm;
  PetscErrorCode ierr;
  PetscBool      match;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidIntPointer(n,2);
  if (is) PetscValidPointer(is,3);
  ierr = PetscTypeCompare((PetscObject)pc,PCGASM,&match);CHKERRQ(ierr);
  if (!match) {
    if (n)  *n  = 0;
    if (is) *is = PETSC_NULL;
  } else {
    osm = (PC_GASM*)pc->data;
    if (n)  *n  = osm->n;
    if (is) *is = osm->is;
    if (is_local) *is_local = osm->is_local;
  }
  PetscFunctionReturn(0);
}

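/*
   Usage sketch for PCGASMGetLocalSubdomains() (illustrative): inspect the
   subdomains that PCSetUp_GASM() settled on.

     PetscInt n;
     IS       *is,*is_local;

     ierr = PCGASMGetLocalSubdomains(pc,&n,&is,&is_local);CHKERRQ(ierr);
     ierr = PetscPrintf(PETSC_COMM_SELF,"%D local subdomains\n",n);CHKERRQ(ierr);
*/
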
#undef __FUNCT__
#define __FUNCT__ "PCGASMGetLocalSubmatrices"
/*@C
    PCGASMGetLocalSubmatrices - Gets the local submatrices (for this processor
    only) for the additive Schwarz preconditioner.

    Not Collective

    Input Parameter:
.   pc - the preconditioner context

    Output Parameters:
+   n - the number of matrices for this processor (default value = 1)
-   mat - the matrices

    Level: advanced

.keywords: PC, GASM, get, local, submatrices, additive Schwarz, block Jacobi

.seealso: PCGASMSetTotalSubdomains(), PCGASMSetOverlap(), PCGASMGetSubKSP(),
          PCGASMCreateSubdomains2D(), PCGASMSetLocalSubdomains(), PCGASMGetLocalSubdomains()
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCGASMGetLocalSubmatrices(PC pc,PetscInt *n,Mat *mat[])
{
  PC_GASM        *osm;
  PetscErrorCode ierr;
  PetscBool      match;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidIntPointer(n,2);
  if (mat) PetscValidPointer(mat,3);
  if (!pc->setupcalled) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUp() or PCSetUp().");
  ierr = PetscTypeCompare((PetscObject)pc,PCGASM,&match);CHKERRQ(ierr);
  if (!match) {
    if (n)   *n   = 0;
    if (mat) *mat = PETSC_NULL;
  } else {
    osm = (PC_GASM*)pc->data;
    if (n)   *n   = osm->n;
    if (mat) *mat = osm->pmat;
  }
  PetscFunctionReturn(0);
}
1424