
/*
  This file defines an additive Schwarz preconditioner for any Mat implementation.

  Note that each processor may have any number of subdomains. But in order to
  deal easily with the VecScatter(), we treat each processor as if it has the
  same number of subdomains.

       n - total number of true subdomains on all processors
       n_local_true - actual number of subdomains on this processor
       n_local = maximum over all processors of n_local_true
*/
#include <petsc/private/pcimpl.h>     /*I "petscpc.h" I*/
#include <petscdm.h>

typedef struct {
  PetscInt   n, n_local, n_local_true;
  PetscInt   overlap;             /* overlap requested by user */
  KSP        *ksp;                /* linear solvers for each block */
  VecScatter *restriction;        /* mapping from global to subregion */
  VecScatter *localization;       /* mapping from overlapping to non-overlapping subregion; used for restrict version of algorithms */
  VecScatter *prolongation;       /* mapping from subregion to global */
  Vec        *x,*y,*y_local;      /* work vectors */
  IS         *is;                 /* index set that defines each overlapping subdomain */
  IS         *is_local;           /* index set that defines each non-overlapping subdomain, may be NULL */
  Mat        *mat,*pmat;          /* mat is not currently used */
  PCASMType  type;                /* use reduced interpolation, restriction or both */
  PetscBool  type_set;            /* if the user set this value (so it won't be changed for symmetric problems) */
  PetscBool  same_local_solves;   /* flag indicating whether all local solvers are the same */
  PetscBool  sort_indices;        /* flag to sort subdomain indices */
  PetscBool  dm_subdomains;       /* whether the DM is allowed to define subdomains */
  PCCompositeType loctype;        /* the type of composition for local solves */
  MatType    sub_mat_type;        /* the type of Mat used for subdomain solves (can be MATSAME or NULL) */
  /* For multiplicative solve */
  Mat       *lmats;               /* submatrices for overlapping multiplicative (process) subdomain */
  Vec        lx, ly;              /* work vectors */
  IS         lis;                 /* index set that defines each overlapping multiplicative (process) subdomain */
  VecScatter *lprolongation;      /* mapping from subregion to overlapping process subdomain */
} PC_ASM;
static PetscErrorCode PCView_ASM(PC pc,PetscViewer viewer)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  PetscInt       i,bsz;
  PetscBool      iascii,isstring;
  PetscViewer    sviewer;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    char overlaps[256] = "user-defined overlap",blocks[256] = "total subdomain blocks not yet set";
    if (osm->overlap >= 0) {ierr = PetscSNPrintf(overlaps,sizeof(overlaps),"amount of overlap = %D",osm->overlap);CHKERRQ(ierr);}
    if (osm->n > 0) {ierr = PetscSNPrintf(blocks,sizeof(blocks),"total subdomain blocks = %D",osm->n);CHKERRQ(ierr);}
    ierr = PetscViewerASCIIPrintf(viewer,"  %s, %s\n",blocks,overlaps);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer,"  restriction/interpolation type - %s\n",PCASMTypes[osm->type]);CHKERRQ(ierr);
    if (osm->dm_subdomains) {ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: using DM to define subdomains\n");CHKERRQ(ierr);}
    if (osm->loctype != PC_COMPOSITE_ADDITIVE) {ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: local solve composition type - %s\n",PCCompositeTypes[osm->loctype]);CHKERRQ(ierr);}
    ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
    if (osm->same_local_solves) {
      if (osm->ksp) {
        ierr = PetscViewerASCIIPrintf(viewer,"  Local solve is same for all blocks, in the following KSP and PC objects:\n");CHKERRQ(ierr);
        ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
        if (!rank) {
          ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
          ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);
          ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
        }
        ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      }
    } else {
      ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"  [%d] number of local blocks = %D\n",(int)rank,osm->n_local_true);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"  Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
      ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      for (i=0; i<osm->n_local_true; i++) {
        ierr = ISGetLocalSize(osm->is[i],&bsz);CHKERRQ(ierr);
        ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"[%d] local block number %D, size = %D\n",(int)rank,i,bsz);CHKERRQ(ierr);
        ierr = KSPView(osm->ksp[i],sviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
      }
      ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    ierr = PetscViewerStringSPrintf(viewer," blocks=%D, overlap=%D, type=%s",osm->n,osm->overlap,PCASMTypes[osm->type]);CHKERRQ(ierr);
    ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
    if (osm->ksp) {ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);}
    ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode PCASMPrintSubdomains(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  const char     *prefix;
  char           fname[PETSC_MAX_PATH_LEN+1];
  PetscViewer    viewer, sviewer;
  char           *s;
  PetscInt       i,j,nidx;
  const PetscInt *idx;
  PetscMPIInt    rank, size;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank);CHKERRQ(ierr);
  ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
  ierr = PetscOptionsGetString(NULL,prefix,"-pc_asm_print_subdomains",fname,PETSC_MAX_PATH_LEN,NULL);CHKERRQ(ierr);
  if (fname[0] == 0) { ierr = PetscStrcpy(fname,"stdout");CHKERRQ(ierr); }
  ierr = PetscViewerASCIIOpen(PetscObjectComm((PetscObject)pc),fname,&viewer);CHKERRQ(ierr);
  for (i=0; i<osm->n_local; i++) {
    if (i < osm->n_local_true) {
      ierr = ISGetLocalSize(osm->is[i],&nidx);CHKERRQ(ierr);
      ierr = ISGetIndices(osm->is[i],&idx);CHKERRQ(ierr);
      /* Print to a string viewer; no more than 15 characters per index plus 512 chars for the header. */
      ierr = PetscMalloc1(16*(nidx+1)+512, &s);CHKERRQ(ierr);
      ierr = PetscViewerStringOpen(PETSC_COMM_SELF, s, 16*(nidx+1)+512, &sviewer);CHKERRQ(ierr);
      ierr = PetscViewerStringSPrintf(sviewer, "[%d:%d] Subdomain %D with overlap:\n", (int)rank, (int)size, i);CHKERRQ(ierr);
      for (j=0; j<nidx; j++) {
        ierr = PetscViewerStringSPrintf(sviewer,"%D ",idx[j]);CHKERRQ(ierr);
      }
      ierr = ISRestoreIndices(osm->is[i],&idx);CHKERRQ(ierr);
      ierr = PetscViewerStringSPrintf(sviewer,"\n");CHKERRQ(ierr);
      ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer, "%s", s);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
      ierr = PetscFree(s);CHKERRQ(ierr);
      if (osm->is_local) {
        /* Print to a string viewer; no more than 15 characters per index plus 512 chars for the header. */
        ierr = PetscMalloc1(16*(nidx+1)+512, &s);CHKERRQ(ierr);
        ierr = PetscViewerStringOpen(PETSC_COMM_SELF, s, 16*(nidx+1)+512, &sviewer);CHKERRQ(ierr);
        ierr = PetscViewerStringSPrintf(sviewer, "[%d:%d] Subdomain %D without overlap:\n", (int)rank, (int)size, i);CHKERRQ(ierr);
        ierr = ISGetLocalSize(osm->is_local[i],&nidx);CHKERRQ(ierr);
        ierr = ISGetIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
        for (j=0; j<nidx; j++) {
          ierr = PetscViewerStringSPrintf(sviewer,"%D ",idx[j]);CHKERRQ(ierr);
        }
        ierr = ISRestoreIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
        ierr = PetscViewerStringSPrintf(sviewer,"\n");CHKERRQ(ierr);
        ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIISynchronizedPrintf(viewer, "%s", s);CHKERRQ(ierr);
        ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
        ierr = PetscFree(s);CHKERRQ(ierr);
      }
    } else {
      /* Participate in collective viewer calls. */
      ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
      /* Assume either all ranks have is_local or none do. */
      if (osm->is_local) {
        ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
        ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
      }
    }
  }
  ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
  ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCSetUp_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscBool      symset,flg;
  PetscInt       i,m,m_local;
  MatReuse       scall = MAT_REUSE_MATRIX;
  IS             isl;
  KSP            ksp;
  PC             subpc;
  const char     *prefix,*pprefix;
  Vec            vec;
  DM             *domain_dm = NULL;

  PetscFunctionBegin;
  if (!pc->setupcalled) {

    if (!osm->type_set) {
      ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
      if (symset && flg) osm->type = PC_ASM_BASIC;
    }

    /* Note: if subdomains have been set either via PCASMSetTotalSubdomains() or via PCASMSetLocalSubdomains(), osm->n_local_true will not be PETSC_DECIDE */
    if (osm->n_local_true == PETSC_DECIDE) {
      /* no subdomains given */
      /* try pc->dm first, if allowed */
      if (osm->dm_subdomains && pc->dm) {
        PetscInt  num_domains, d;
        char      **domain_names;
        IS        *inner_domain_is, *outer_domain_is;
        ierr = DMCreateDomainDecomposition(pc->dm, &num_domains, &domain_names, &inner_domain_is, &outer_domain_is, &domain_dm);CHKERRQ(ierr);
        osm->overlap = -1; /* We do not want to increase the overlap of the IS.
                              A future improvement of this code might allow one to use
                              DM-defined subdomains and also increase the overlap,
                              but that is not currently supported */
        if (num_domains) {
          ierr = PCASMSetLocalSubdomains(pc, num_domains, outer_domain_is, inner_domain_is);CHKERRQ(ierr);
        }
        for (d = 0; d < num_domains; ++d) {
          if (domain_names)    {ierr = PetscFree(domain_names[d]);CHKERRQ(ierr);}
          if (inner_domain_is) {ierr = ISDestroy(&inner_domain_is[d]);CHKERRQ(ierr);}
          if (outer_domain_is) {ierr = ISDestroy(&outer_domain_is[d]);CHKERRQ(ierr);}
        }
        ierr = PetscFree(domain_names);CHKERRQ(ierr);
        ierr = PetscFree(inner_domain_is);CHKERRQ(ierr);
        ierr = PetscFree(outer_domain_is);CHKERRQ(ierr);
      }
      if (osm->n_local_true == PETSC_DECIDE) {
        /* still no subdomains; use one subdomain per processor */
        osm->n_local_true = 1;
      }
    }
    { /* determine the global and max number of subdomains */
      struct {PetscInt max,sum;} inwork,outwork;
      PetscMPIInt size;

      inwork.max   = osm->n_local_true;
      inwork.sum   = osm->n_local_true;
      ierr         = MPIU_Allreduce(&inwork,&outwork,1,MPIU_2INT,MPIU_MAXSUM_OP,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
      osm->n_local = outwork.max;
      osm->n       = outwork.sum;

      ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);
      if (outwork.max == 1 && outwork.sum == size) {
        /* osm->n_local_true = 1 on all processes; setting this option may enable use of an optimized MatCreateSubMatrices() implementation */
        ierr = MatSetOption(pc->pmat,MAT_SUBMAT_SINGLEIS,PETSC_TRUE);CHKERRQ(ierr);
      }
    }
    if (!osm->is) { /* create the index sets */
      ierr = PCASMCreateSubdomains(pc->pmat,osm->n_local_true,&osm->is);CHKERRQ(ierr);
    }
    if (osm->n_local_true > 1 && !osm->is_local) {
      ierr = PetscMalloc1(osm->n_local_true,&osm->is_local);CHKERRQ(ierr);
      for (i=0; i<osm->n_local_true; i++) {
        if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
          ierr = ISDuplicate(osm->is[i],&osm->is_local[i]);CHKERRQ(ierr);
          ierr = ISCopy(osm->is[i],osm->is_local[i]);CHKERRQ(ierr);
        } else {
          ierr             = PetscObjectReference((PetscObject)osm->is[i]);CHKERRQ(ierr);
          osm->is_local[i] = osm->is[i];
        }
      }
    }
    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    flg  = PETSC_FALSE;
    ierr = PetscOptionsGetBool(NULL,prefix,"-pc_asm_print_subdomains",&flg,NULL);CHKERRQ(ierr);
    if (flg) { ierr = PCASMPrintSubdomains(pc);CHKERRQ(ierr); }

    if (osm->overlap > 0) {
      /* Extend the "overlapping" regions by a number of steps */
      ierr = MatIncreaseOverlap(pc->pmat,osm->n_local_true,osm->is,osm->overlap);CHKERRQ(ierr);
    }
    if (osm->sort_indices) {
      for (i=0; i<osm->n_local_true; i++) {
        ierr = ISSort(osm->is[i]);CHKERRQ(ierr);
        if (osm->is_local) {
          ierr = ISSort(osm->is_local[i]);CHKERRQ(ierr);
        }
      }
    }

    if (!osm->ksp) {
      /* Create the local solvers */
      ierr = PetscMalloc1(osm->n_local_true,&osm->ksp);CHKERRQ(ierr);
      if (domain_dm) {
        ierr = PetscInfo(pc,"Setting up ASM subproblems using the embedded DM\n");CHKERRQ(ierr);
      }
      for (i=0; i<osm->n_local_true; i++) {
        ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
        ierr = KSPSetErrorIfNotConverged(ksp,pc->erroriffailure);CHKERRQ(ierr);
        ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)ksp);CHKERRQ(ierr);
        ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
        ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
        ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
        ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
        ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
        ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
        if (domain_dm) {
          ierr = KSPSetDM(ksp, domain_dm[i]);CHKERRQ(ierr);
          ierr = KSPSetDMActive(ksp, PETSC_FALSE);CHKERRQ(ierr);
          ierr = DMDestroy(&domain_dm[i]);CHKERRQ(ierr);
        }
        osm->ksp[i] = ksp;
      }
      if (domain_dm) {
        ierr = PetscFree(domain_dm);CHKERRQ(ierr);
      }
    }
    if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
      PetscInt m;
      ierr = ISConcatenate(PETSC_COMM_SELF, osm->n_local_true, osm->is, &osm->lis);CHKERRQ(ierr);
      ierr = ISSortRemoveDups(osm->lis);CHKERRQ(ierr);
      ierr = ISGetLocalSize(osm->lis, &m);CHKERRQ(ierr);
      ierr = VecCreateSeq(PETSC_COMM_SELF, m, &osm->lx);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->lx, &osm->ly);CHKERRQ(ierr);
    }
    scall = MAT_INITIAL_MATRIX;
  } else {
    /*
       Destroy the blocks from the previous iteration
    */
    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
      scall = MAT_INITIAL_MATRIX;
    }
  }

  /*
     Extract out the submatrices
  */
  ierr = MatCreateSubMatrices(pc->pmat,osm->n_local_true,osm->is,osm->is,scall,&osm->pmat);CHKERRQ(ierr);
  if (scall == MAT_INITIAL_MATRIX) {
    ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
    for (i=0; i<osm->n_local_true; i++) {
      ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)osm->pmat[i]);CHKERRQ(ierr);
      ierr = PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);CHKERRQ(ierr);
    }
  }

  /* Convert the types of the submatrices (if need be) */
  if (osm->sub_mat_type) {
    for (i=0; i<osm->n_local_true; i++) {
      ierr = MatConvert(osm->pmat[i],osm->sub_mat_type,MAT_INPLACE_MATRIX,&(osm->pmat[i]));CHKERRQ(ierr);
    }
  }

  if (!pc->setupcalled) {
    /* Create the local work vectors (from the local matrices) and scatter contexts */
    ierr = MatCreateVecs(pc->pmat,&vec,NULL);CHKERRQ(ierr);
    ierr = PetscMalloc1(osm->n_local,&osm->restriction);CHKERRQ(ierr);
    if (osm->is_local && (osm->type == PC_ASM_INTERPOLATE || osm->type == PC_ASM_NONE)) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot use interpolate or none PCASMType if is_local was provided to PCASMSetLocalSubdomains()");
    if (osm->is_local && osm->type == PC_ASM_RESTRICT) {ierr = PetscMalloc1(osm->n_local,&osm->localization);CHKERRQ(ierr);}
    ierr = PetscMalloc1(osm->n_local,&osm->prolongation);CHKERRQ(ierr);
    if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
      ierr = PetscMalloc1(osm->n_local,&osm->lprolongation);CHKERRQ(ierr);
    }

    ierr = PetscMalloc1(osm->n_local,&osm->x);CHKERRQ(ierr);
    ierr = PetscMalloc1(osm->n_local,&osm->y);CHKERRQ(ierr);
    ierr = PetscMalloc1(osm->n_local,&osm->y_local);CHKERRQ(ierr);
    for (i=0; i<osm->n_local_true; ++i) {
      ierr = ISGetLocalSize(osm->is[i],&m);CHKERRQ(ierr);
      ierr = MatCreateVecs(osm->pmat[i],&osm->x[i],NULL);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,osm->is[i],osm->x[i],isl,&osm->restriction[i]);CHKERRQ(ierr);
      ierr = ISDestroy(&isl);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
      if (osm->localization) {
        ISLocalToGlobalMapping ltog;
        IS                     isll;
        const PetscInt         *idx_local;
        PetscInt               *idx,nout;

        ierr = ISLocalToGlobalMappingCreateIS(osm->is[i],&ltog);CHKERRQ(ierr);
        ierr = ISGetLocalSize(osm->is_local[i],&m_local);CHKERRQ(ierr);
        ierr = ISGetIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr);
        ierr = PetscMalloc1(m_local,&idx);CHKERRQ(ierr);
        ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m_local,idx_local,&nout,idx);CHKERRQ(ierr);
        ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
        if (nout != m_local) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"is_local not a subset of is");
        ierr = ISRestoreIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr);
        ierr = ISCreateGeneral(PETSC_COMM_SELF,m_local,idx,PETSC_OWN_POINTER,&isll);CHKERRQ(ierr);
        ierr = ISCreateStride(PETSC_COMM_SELF,m_local,0,1,&isl);CHKERRQ(ierr);
        ierr = VecCreateSeq(PETSC_COMM_SELF,m_local,&osm->y_local[i]);CHKERRQ(ierr);
        ierr = VecScatterCreate(osm->y[i],isll,osm->y_local[i],isl,&osm->localization[i]);CHKERRQ(ierr);
        ierr = ISDestroy(&isll);CHKERRQ(ierr);

        ierr = VecScatterCreate(vec,osm->is_local[i],osm->y_local[i],isl,&osm->prolongation[i]);CHKERRQ(ierr);
        ierr = ISDestroy(&isl);CHKERRQ(ierr);
      } else {
        osm->y_local[i] = osm->y[i];
        ierr = PetscObjectReference((PetscObject) osm->y[i]);CHKERRQ(ierr);
        osm->prolongation[i] = osm->restriction[i];
        ierr = PetscObjectReference((PetscObject) osm->restriction[i]);CHKERRQ(ierr);
      }

      if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE && i < osm->n_local_true - 1) {
        ISLocalToGlobalMapping ltog;
        IS                     isll;
        const PetscInt         *idx_is;
        PetscInt               *idx_lis,nout;

        ierr = ISLocalToGlobalMappingCreateIS(osm->lis,&ltog);CHKERRQ(ierr);
        ierr = ISGetLocalSize(osm->is[i],&m);CHKERRQ(ierr);
        ierr = ISGetIndices(osm->is[i], &idx_is);CHKERRQ(ierr);
        ierr = PetscMalloc1(m,&idx_lis);CHKERRQ(ierr);
        ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m,idx_is,&nout,idx_lis);CHKERRQ(ierr);
        ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
        if (nout != m) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"is not a subset of lis");
        ierr = ISRestoreIndices(osm->is[i], &idx_is);CHKERRQ(ierr);
        ierr = ISCreateGeneral(PETSC_COMM_SELF,m,idx_lis,PETSC_OWN_POINTER,&isll);CHKERRQ(ierr);
        ierr = ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);CHKERRQ(ierr);
        ierr = VecScatterCreate(osm->ly,isll,osm->y[i],isl,&osm->lprolongation[i]);CHKERRQ(ierr);
        ierr = ISDestroy(&isll);CHKERRQ(ierr);
        ierr = ISDestroy(&isl);CHKERRQ(ierr);
      }

    }
    for (i=osm->n_local_true; i<osm->n_local; i++) {
      ierr = VecCreateSeq(PETSC_COMM_SELF,0,&osm->x[i]);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y_local[i]);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&isl);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,isl,osm->x[i],isl,&osm->restriction[i]);CHKERRQ(ierr);
      if (osm->localization) {
        ierr = VecScatterCreate(osm->y[i],isl,osm->y_local[i],isl,&osm->localization[i]);CHKERRQ(ierr);
        ierr = VecScatterCreate(vec,isl,osm->x[i],isl,&osm->prolongation[i]);CHKERRQ(ierr);
      } else {
        osm->prolongation[i] = osm->restriction[i];
        ierr                 = PetscObjectReference((PetscObject) osm->restriction[i]);CHKERRQ(ierr);
      }
      ierr = ISDestroy(&isl);CHKERRQ(ierr);
    }
    ierr = VecDestroy(&vec);CHKERRQ(ierr);
  }

  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
    IS      *cis;
    PetscInt c;

    ierr = PetscMalloc1(osm->n_local_true, &cis);CHKERRQ(ierr);
    for (c = 0; c < osm->n_local_true; ++c) cis[c] = osm->lis;
    ierr = MatCreateSubMatrices(pc->pmat, osm->n_local_true, osm->is, cis, scall, &osm->lmats);CHKERRQ(ierr);
    ierr = PetscFree(cis);CHKERRQ(ierr);
  }

  /* Return control to the user so that the submatrices can be modified (e.g., to apply
     different boundary conditions for the submatrices than for the global problem) */
  ierr = PCModifySubMatrices(pc,osm->n_local_true,osm->is,osm->is,osm->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);

  /*
     Loop over the subdomains, setting the submatrices on the local KSPs
  */
  for (i=0; i<osm->n_local_true; i++) {
    ierr = KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i]);CHKERRQ(ierr);
    if (!pc->setupcalled) {
      ierr = KSPSetFromOptions(osm->ksp[i]);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc)
{
  PC_ASM             *osm = (PC_ASM*)pc->data;
  PetscErrorCode     ierr;
  PetscInt           i;
  KSPConvergedReason reason;

  PetscFunctionBegin;
  for (i=0; i<osm->n_local_true; i++) {
    ierr = KSPSetUp(osm->ksp[i]);CHKERRQ(ierr);
    ierr = KSPGetConvergedReason(osm->ksp[i],&reason);CHKERRQ(ierr);
    if (reason == KSP_DIVERGED_PCSETUP_FAILED) {
      pc->failedreason = PC_SUBPC_ERROR;
    }
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode PCApply_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local = osm->n_local,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).
  */
  if (!(osm->type & PC_ASM_RESTRICT)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local_true; i++) {
      ierr = VecZeroEntries(osm->x[i]);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_INTERPOLATE)) reverse = SCATTER_REVERSE_LOCAL;

  switch (osm->loctype) {
  case PC_COMPOSITE_ADDITIVE:
    for (i=0; i<n_local; i++) {
      ierr = VecScatterBegin(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    }
    ierr = VecZeroEntries(y);CHKERRQ(ierr);
    /* do the local solves */
    for (i=0; i<n_local_true; i++) {
      ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
      ierr = KSPSolve(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
      if (osm->localization) {
        ierr = VecScatterBegin(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
      }
      ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
    }
    /* handle the rest of the scatters that do not have local solves */
    for (i=n_local_true; i<n_local; i++) {
      ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
      ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
    }
    for (i=0; i<n_local; i++) {
      ierr = VecScatterEnd(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
    }
    break;
  case PC_COMPOSITE_MULTIPLICATIVE:
    ierr = VecZeroEntries(y);CHKERRQ(ierr);
    ierr = VecSet(osm->ly, 0.0);CHKERRQ(ierr);
    /* do the local solves */
    for (i = 0; i < n_local_true; ++i) {
      if (i == 0) {
        ierr = VecZeroEntries(osm->x[i]);CHKERRQ(ierr);
      }
      ierr = VecScatterBegin(osm->restriction[i], x, osm->x[i], ADD_VALUES, forward);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->restriction[i], x, osm->x[i], ADD_VALUES, forward);CHKERRQ(ierr);
      ierr = KSPSolve(osm->ksp[i], osm->x[i], osm->y[i]);CHKERRQ(ierr);
      if (osm->localization) {
        ierr = VecScatterBegin(osm->localization[i], osm->y[i], osm->y_local[i], INSERT_VALUES, forward);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->localization[i], osm->y[i], osm->y_local[i], INSERT_VALUES, forward);CHKERRQ(ierr);
      }
      ierr = VecScatterBegin(osm->prolongation[i], osm->y_local[i], y, ADD_VALUES, reverse);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->prolongation[i], osm->y_local[i], y, ADD_VALUES, reverse);CHKERRQ(ierr);
      if (i < n_local_true-1) {
        ierr = VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, reverse);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, reverse);CHKERRQ(ierr);
        ierr = VecCopy(osm->ly, osm->lx);CHKERRQ(ierr);
        ierr = VecScale(osm->lx, -1.0);CHKERRQ(ierr);
        ierr = MatMult(osm->lmats[i+1], osm->lx, osm->x[i+1]);CHKERRQ(ierr);
      }
    }
    /* handle the rest of the scatters that do not have local solves */
    for (i = n_local_true; i < n_local; ++i) {
      ierr = VecScatterBegin(osm->restriction[i], x, osm->x[i], INSERT_VALUES, forward);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->restriction[i], x, osm->x[i], INSERT_VALUES, forward);CHKERRQ(ierr);
      ierr = VecScatterBegin(osm->prolongation[i], osm->y_local[i], y, ADD_VALUES, reverse);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->prolongation[i], osm->y_local[i], y, ADD_VALUES, reverse);CHKERRQ(ierr);
    }
    break;
  default: SETERRQ1(PetscObjectComm((PetscObject) pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode PCApplyTranspose_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local = osm->n_local,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).

     Note: these are reversed from the PCApply_ASM() because we are applying the
     transpose of the three terms
  */
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local_true; i++) {
      ierr = VecZeroEntries(osm->x[i]);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_RESTRICT)) reverse = SCATTER_REVERSE_LOCAL;

  for (i=0; i<n_local; i++) {
    ierr = VecScatterBegin(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
  }
  ierr = VecZeroEntries(y);CHKERRQ(ierr);
  /* do the local solves */
  for (i=0; i<n_local_true; i++) {
    ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = KSPSolveTranspose(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
    if (osm->localization) {
      ierr = VecScatterBegin(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    }
    ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  /* handle the rest of the scatters that do not have local solves */
  for (i=n_local_true; i<n_local; i++) {
    ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  for (i=0; i<n_local; i++) {
    ierr = VecScatterEnd(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode PCReset_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  if (osm->ksp) {
    for (i=0; i<osm->n_local_true; i++) {
      ierr = KSPReset(osm->ksp[i]);CHKERRQ(ierr);
    }
  }
  if (osm->pmat) {
    if (osm->n_local_true > 0) {
      ierr = MatDestroySubMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
    }
  }
  if (osm->restriction) {
    for (i=0; i<osm->n_local; i++) {
      ierr = VecScatterDestroy(&osm->restriction[i]);CHKERRQ(ierr);
      if (osm->localization) {ierr = VecScatterDestroy(&osm->localization[i]);CHKERRQ(ierr);}
      ierr = VecScatterDestroy(&osm->prolongation[i]);CHKERRQ(ierr);
      if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE && i < osm->n_local_true - 1) {
        ierr = VecScatterDestroy(&osm->lprolongation[i]);CHKERRQ(ierr);
      }
      ierr = VecDestroy(&osm->x[i]);CHKERRQ(ierr);
      ierr = VecDestroy(&osm->y[i]);CHKERRQ(ierr);
      ierr = VecDestroy(&osm->y_local[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree(osm->restriction);CHKERRQ(ierr);
    if (osm->localization) {ierr = PetscFree(osm->localization);CHKERRQ(ierr);}
    ierr = PetscFree(osm->prolongation);CHKERRQ(ierr);
    if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
      ierr = PetscFree(osm->lprolongation);CHKERRQ(ierr);
    }
    ierr = PetscFree(osm->x);CHKERRQ(ierr);
    ierr = PetscFree(osm->y);CHKERRQ(ierr);
    ierr = PetscFree(osm->y_local);CHKERRQ(ierr);
  }
  ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
    ierr = ISDestroy(&osm->lis);CHKERRQ(ierr);
    ierr = MatDestroyMatrices(osm->n_local_true, &osm->lmats);CHKERRQ(ierr);
    ierr = VecDestroy(&osm->lx);CHKERRQ(ierr);
    ierr = VecDestroy(&osm->ly);CHKERRQ(ierr);
  }

  ierr = PetscFree(osm->sub_mat_type);CHKERRQ(ierr);

  osm->is       = 0;
  osm->is_local = 0;
  PetscFunctionReturn(0);
}

static PetscErrorCode PCDestroy_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  ierr = PCReset_ASM(pc);CHKERRQ(ierr);
  if (osm->ksp) {
    for (i=0; i<osm->n_local_true; i++) {
      ierr = KSPDestroy(&osm->ksp[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree(osm->ksp);CHKERRQ(ierr);
  }
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCSetFromOptions_ASM(PetscOptionItems *PetscOptionsObject,PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       blocks,ovl;
  PetscBool      symset,flg;
  PCASMType      asmtype;
  PCCompositeType loctype;
  char           sub_mat_type[256];

  PetscFunctionBegin;
  /* set the type to symmetric if matrix is symmetric */
  if (!osm->type_set && pc->pmat) {
    ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
    if (symset && flg) osm->type = PC_ASM_BASIC;
  }
  ierr = PetscOptionsHead(PetscOptionsObject,"Additive Schwarz options");CHKERRQ(ierr);
  ierr = PetscOptionsBool("-pc_asm_dm_subdomains","Use DMCreateDomainDecomposition() to define subdomains","PCASMSetDMSubdomains",osm->dm_subdomains,&osm->dm_subdomains,&flg);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-pc_asm_blocks","Number of subdomains","PCASMSetTotalSubdomains",osm->n,&blocks,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PCASMSetTotalSubdomains(pc,blocks,NULL,NULL);CHKERRQ(ierr);
    osm->dm_subdomains = PETSC_FALSE;
  }
  ierr = PetscOptionsInt("-pc_asm_overlap","Number of grid points overlap","PCASMSetOverlap",osm->overlap,&ovl,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PCASMSetOverlap(pc,ovl);CHKERRQ(ierr);
    osm->dm_subdomains = PETSC_FALSE;
  }
  flg  = PETSC_FALSE;
  ierr = PetscOptionsEnum("-pc_asm_type","Type of restriction/extension","PCASMSetType",PCASMTypes,(PetscEnum)osm->type,(PetscEnum*)&asmtype,&flg);CHKERRQ(ierr);
  if (flg) {ierr = PCASMSetType(pc,asmtype);CHKERRQ(ierr); }
  flg  = PETSC_FALSE;
  ierr = PetscOptionsEnum("-pc_asm_local_type","Type of local solver composition","PCASMSetLocalType",PCCompositeTypes,(PetscEnum)osm->loctype,(PetscEnum*)&loctype,&flg);CHKERRQ(ierr);
  if (flg) {ierr = PCASMSetLocalType(pc,loctype);CHKERRQ(ierr); }
  ierr = PetscOptionsFList("-pc_asm_sub_mat_type","Subsolve Matrix Type","PCASMSetSubMatType",MatList,NULL,sub_mat_type,256,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PCASMSetSubMatType(pc,sub_mat_type);CHKERRQ(ierr);
  }
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*------------------------------------------------------------------------------------*/

static PetscErrorCode  PCASMSetLocalSubdomains_ASM(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 1 or more blocks, n = %D",n);
  if (pc->setupcalled && (n != osm->n_local_true || is)) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetLocalSubdomains() should be called before calling PCSetUp().");

  if (!pc->setupcalled) {
    if (is) {
      for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is[i]);CHKERRQ(ierr);}
    }
    if (is_local) {
      for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is_local[i]);CHKERRQ(ierr);}
    }
    ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);

    osm->n_local_true = n;
    osm->is           = 0;
    osm->is_local     = 0;
    if (is) {
      ierr = PetscMalloc1(n,&osm->is);CHKERRQ(ierr);
      for (i=0; i<n; i++) osm->is[i] = is[i];
      /* Flag indicating that the user has set overlapping subdomains so PCASM should not increase their size. */
      osm->overlap = -1;
    }
    if (is_local) {
      ierr = PetscMalloc1(n,&osm->is_local);CHKERRQ(ierr);
      for (i=0; i<n; i++) osm->is_local[i] = is_local[i];
      if (!is) {
        ierr = PetscMalloc1(osm->n_local_true,&osm->is);CHKERRQ(ierr);
        for (i=0; i<osm->n_local_true; i++) {
          if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
            ierr = ISDuplicate(osm->is_local[i],&osm->is[i]);CHKERRQ(ierr);
            ierr = ISCopy(osm->is_local[i],osm->is[i]);CHKERRQ(ierr);
          } else {
            ierr = PetscObjectReference((PetscObject)osm->is_local[i]);CHKERRQ(ierr);
            osm->is[i] = osm->is_local[i];
          }
        }
      }
    }
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode  PCASMSetTotalSubdomains_ASM(PC pc,PetscInt N,IS *is,IS *is_local)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscMPIInt    rank,size;
  PetscInt       n;

  PetscFunctionBegin;
  if (N < 1) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Number of total blocks must be > 0, N = %D",N);
  if (is || is_local) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Use PCASMSetLocalSubdomains() to set specific index sets\n\tthey cannot be set globally yet.");

  /*
     Split the subdomains equally among all processors
  */
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);
  n    = N/size + ((N % size) > rank);
  if (!n) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Process %d must have at least one block: total processors %d total blocks %D",(int)rank,(int)size,N);
  if (pc->setupcalled && n != osm->n_local_true) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"PCASMSetTotalSubdomains() should be called before PCSetUp().");
  if (!pc->setupcalled) {
    ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);

    osm->n_local_true = n;
    osm->is           = 0;
    osm->is_local     = 0;
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode  PCASMSetOverlap_ASM(PC pc,PetscInt ovl)
{
  PC_ASM *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  if (ovl < 0) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
  if (pc->setupcalled && ovl != osm->overlap) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetOverlap() should be called before PCSetUp().");
  if (!pc->setupcalled) osm->overlap = ovl;
  PetscFunctionReturn(0);
}

static PetscErrorCode  PCASMSetType_ASM(PC pc,PCASMType type)
{
  PC_ASM *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  osm->type     = type;
  osm->type_set = PETSC_TRUE;
  PetscFunctionReturn(0);
}

static PetscErrorCode  PCASMGetType_ASM(PC pc,PCASMType *type)
{
  PC_ASM *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  *type = osm->type;
  PetscFunctionReturn(0);
}

static PetscErrorCode  PCASMSetLocalType_ASM(PC pc, PCCompositeType type)
{
  PC_ASM *osm = (PC_ASM *) pc->data;

  PetscFunctionBegin;
  osm->loctype = type;
  PetscFunctionReturn(0);
}

static PetscErrorCode  PCASMGetLocalType_ASM(PC pc, PCCompositeType *type)
{
  PC_ASM *osm = (PC_ASM *) pc->data;

  PetscFunctionBegin;
  *type = osm->loctype;
  PetscFunctionReturn(0);
}

static PetscErrorCode  PCASMSetSortIndices_ASM(PC pc,PetscBool doSort)
{
  PC_ASM *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  osm->sort_indices = doSort;
  PetscFunctionReturn(0);
}

static PetscErrorCode  PCASMGetSubKSP_ASM(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (osm->n_local_true < 1) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ORDER,"Need to call PCSetUp() on PC (or KSPSetUp() on the outer KSP object) before calling here");

  if (n_local) *n_local = osm->n_local_true;
  if (first_local) {
    ierr          = MPI_Scan(&osm->n_local_true,first_local,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
    *first_local -= osm->n_local_true;
  }
  if (ksp) {
    /* Assume that the local solves are now different; not necessarily
       true, though!  This flag is used only for PCView_ASM() */
    *ksp                   = osm->ksp;
    osm->same_local_solves = PETSC_FALSE;
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode  PCASMGetSubMatType_ASM(PC pc,MatType *sub_mat_type)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(sub_mat_type,2);
  *sub_mat_type = osm->sub_mat_type;
  PetscFunctionReturn(0);
}

static PetscErrorCode PCASMSetSubMatType_ASM(PC pc,MatType sub_mat_type)
{
  PetscErrorCode    ierr;
  PC_ASM            *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscFree(osm->sub_mat_type);CHKERRQ(ierr);
  ierr = PetscStrallocpy(sub_mat_type,(char**)&osm->sub_mat_type);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
    PCASMSetLocalSubdomains - Sets the local subdomains (for this processor only) for the additive Schwarz preconditioner.

    Collective on PC

    Input Parameters:
+   pc - the preconditioner context
.   n - the number of subdomains for this processor (default value = 1)
.   is - the index sets that define the subdomains for this processor
         (or NULL for PETSc to determine the subdomains)
-   is_local - the index sets that define the local part of the subdomains for this processor, not used unless PCASMType is PC_ASM_RESTRICT
         (or NULL to not provide these)

    Notes:
    The IS numbering is in the parallel, global numbering of the vector for both is and is_local

    By default the ASM preconditioner uses 1 block per processor.

    Use PCASMSetTotalSubdomains() to set the subdomains for all processors.

    If is_local is provided and PCASMType is PC_ASM_RESTRICT then only the solution over the is_local region is interpolated
    back to form the global solution (this is the standard restricted additive Schwarz method).

    If is_local is provided and PCASMType is PC_ASM_INTERPOLATE or PC_ASM_NONE then an error is generated since there is
    no code to handle that case.
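
    A minimal usage sketch (error checking omitted; it assumes the caller has already
    created index sets is[0] and is[1] for the two blocks owned by this process):
.vb
      PCSetType(pc,PCASM);
      PCASMSetLocalSubdomains(pc,2,is,NULL);  /* is[] created earlier by the caller */
.ve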

    Level: advanced

.keywords: PC, ASM, set, local, subdomains, additive Schwarz

.seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains(), PCASMType, PCASMSetType()
@*/
PetscErrorCode  PCASMSetLocalSubdomains(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCASMSetLocalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,n,is,is_local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
    PCASMSetTotalSubdomains - Sets the subdomains for all processors for the
    additive Schwarz preconditioner.  Either all or no processors in the
    PC communicator must call this routine, with the same index sets.

    Collective on PC

    Input Parameters:
+   pc - the preconditioner context
.   N  - the number of subdomains for all processors
.   is - the index sets that define the subdomains for all processors
         (or NULL to ask PETSc to determine the subdomains)
-   is_local - the index sets that define the local part of the subdomains for this processor
         (or NULL to not provide this information)

    Options Database Key:
    To set the total number of subdomain blocks rather than specify the
    index sets, use the option
.    -pc_asm_blocks <blks> - Sets total blocks

    Notes:
    Currently you cannot use this to set the actual subdomains with the argument is or is_local.

    By default the ASM preconditioner uses 1 block per processor.

    These index sets cannot be destroyed until after completion of the
    linear solves for which the ASM preconditioner is being used.

    Use PCASMSetLocalSubdomains() to set local subdomains.

    The IS numbering is in the parallel, global numbering of the vector for both is and is_local
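
    As a minimal sketch (error checking omitted; the block count 8 is illustrative),
    the following lets PETSc construct the subdomains and split them across the processors:
.vb
      PCSetType(pc,PCASM);
      PCASMSetTotalSubdomains(pc,8,NULL,NULL);  /* PETSc determines the index sets */
.ve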

    Level: advanced

.keywords: PC, ASM, set, total, global, subdomains, additive Schwarz

.seealso: PCASMSetLocalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D()
@*/
PetscErrorCode  PCASMSetTotalSubdomains(PC pc,PetscInt N,IS is[],IS is_local[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCASMSetTotalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,N,is,is_local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
    PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
    additive Schwarz preconditioner.  Either all or no processors in the
    PC communicator must call this routine.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)

    Options Database Key:
.   -pc_asm_overlap <ovl> - Sets overlap

    Notes:
    By default the ASM preconditioner uses 1 block per processor.  To use
    multiple blocks per processor, see PCASMSetTotalSubdomains() and
    PCASMSetLocalSubdomains() (and the option -pc_asm_blocks <blks>).

    The overlap defaults to 1, so if one desires that no additional
    overlap be computed beyond what may have been set with a call to
    PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(), then ovl
    must be set to 0.  In particular, if one does not explicitly set
    the subdomains in an application code, then all overlap would be computed
    internally by PETSc, and using an overlap of 0 would result in an ASM
    variant that is equivalent to the block Jacobi preconditioner.

    The default algorithm used by PETSc to increase overlap is fast but not scalable;
    use the option -mat_increase_overlap_scalable when the problem and number of processes are large.

    Note that one can define initial index sets with any overlap via
    PCASMSetLocalSubdomains(); the routine PCASMSetOverlap() merely allows
    PETSc to extend that overlap further, if desired.
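
    A minimal sketch (error checking omitted) that keeps the default one-block-per-processor
    subdomains but extends them by two layers of overlap:
.vb
      PCSetType(pc,PCASM);
      PCASMSetOverlap(pc,2);
.ve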

    Level: intermediate

.keywords: PC, ASM, set, overlap

.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains(), MatIncreaseOverlap()
@*/
PetscErrorCode  PCASMSetOverlap(PC pc,PetscInt ovl)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveInt(pc,ovl,2);
  ierr = PetscTryMethod(pc,"PCASMSetOverlap_C",(PC,PetscInt),(pc,ovl));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
    PCASMSetType - Sets the type of restriction and interpolation used
    for local problems in the additive Schwarz method.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   type - variant of ASM, one of
.vb
      PC_ASM_BASIC       - full interpolation and restriction
      PC_ASM_RESTRICT    - full restriction, local processor interpolation
      PC_ASM_INTERPOLATE - full interpolation, local processor restriction
      PC_ASM_NONE        - local processor restriction and interpolation
.ve

    Options Database Key:
.   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type

    Notes:
    If the is_local arguments are passed to PCASMSetLocalSubdomains() then they are used when PC_ASM_RESTRICT has been selected
    to limit the local processor interpolation.
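
    A minimal sketch (error checking omitted) selecting the standard (non-restricted)
    additive Schwarz variant:
.vb
      PCSetType(pc,PCASM);
      PCASMSetType(pc,PC_ASM_BASIC);
.ve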

    Level: intermediate

.keywords: PC, ASM, set, type

.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D(), PCASMType, PCASMSetLocalType(), PCASMGetLocalType()
@*/
PetscErrorCode  PCASMSetType(PC pc,PCASMType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveEnum(pc,type,2);
  ierr = PetscTryMethod(pc,"PCASMSetType_C",(PC,PCASMType),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
    PCASMGetType - Gets the type of restriction and interpolation used
    for local problems in the additive Schwarz method.

    Logically Collective on PC

    Input Parameter:
.   pc  - the preconditioner context

    Output Parameter:
.   type - variant of ASM, one of

.vb
      PC_ASM_BASIC       - full interpolation and restriction
      PC_ASM_RESTRICT    - full restriction, local processor interpolation
      PC_ASM_INTERPOLATE - full interpolation, local processor restriction
      PC_ASM_NONE        - local processor restriction and interpolation
.ve

    Options Database Key:
.   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type

    Level: intermediate

.keywords: PC, ASM, set, type

.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D(), PCASMType, PCASMSetType(), PCASMSetLocalType(), PCASMGetLocalType()
@*/
PetscErrorCode  PCASMGetType(PC pc,PCASMType *type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscUseMethod(pc,"PCASMGetType_C",(PC,PCASMType*),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
  PCASMSetLocalType - Sets the type of composition used for local problems in the additive Schwarz method.

  Logically Collective on PC

  Input Parameters:
+ pc  - the preconditioner context
- type - type of composition, one of
.vb
  PC_COMPOSITE_ADDITIVE       - local additive combination
  PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
.ve

  Options Database Key:
. -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type

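  A minimal sketch (error checking omitted) enabling multiplicative composition of the
  local block solves:
.vb
  PCSetType(pc,PCASM);
  PCASMSetLocalType(pc,PC_COMPOSITE_MULTIPLICATIVE);
.ve
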
  Level: intermediate

.seealso: PCASMSetType(), PCASMGetType(), PCASMGetLocalType(), PCASM, PCASMType, PCCompositeType
@*/
PetscErrorCode PCASMSetLocalType(PC pc, PCCompositeType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(pc, type, 2);
  ierr = PetscTryMethod(pc, "PCASMSetLocalType_C", (PC, PCCompositeType), (pc, type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
  PCASMGetLocalType - Gets the type of composition used for local problems in the additive Schwarz method.

  Logically Collective on PC

  Input Parameter:
. pc  - the preconditioner context

  Output Parameter:
. type - type of composition, one of
.vb
  PC_COMPOSITE_ADDITIVE       - local additive combination
  PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
.ve

  Options Database Key:
. -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type

  Level: intermediate

.seealso: PCASMSetType(), PCASMGetType(), PCASMSetLocalType(), PCASMCreate(), PCASMType, PCCompositeType
@*/
PetscErrorCode PCASMGetLocalType(PC pc, PCCompositeType *type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidPointer(type, 2);
  ierr = PetscUseMethod(pc, "PCASMGetLocalType_C", (PC, PCCompositeType *), (pc, type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
    PCASMSetSortIndices - Determines whether subdomain indices are sorted.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   doSort - sort the subdomain indices

    Level: intermediate

.keywords: PC, ASM, set, type

.seealso: PCASMSetLocalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D()
@*/
PetscErrorCode  PCASMSetSortIndices(PC pc,PetscBool doSort)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveBool(pc,doSort,2);
  ierr = PetscTryMethod(pc,"PCASMSetSortIndices_C",(PC,PetscBool),(pc,doSort));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1218 
1219 /*@C
1220    PCASMGetSubKSP - Gets the local KSP contexts for all blocks on
1221    this processor.
1222 
1223    Collective on PC iff first_local is requested
1224 
1225    Input Parameter:
1226 .  pc - the preconditioner context
1227 
1228    Output Parameters:
1229 +  n_local - the number of blocks on this processor or NULL
1230 .  first_local - the global number of the first block on this processor or NULL,
1231                  all processors must request or all must pass NULL
1232 -  ksp - the array of KSP contexts
1233 
1234    Note:
1235    The array of KSP contexts returned by PCASMGetSubKSP() is not to be freed by the user.
1236 
1237    You must call KSPSetUp() before calling PCASMGetSubKSP().
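
   Sample usage (an illustrative sketch; error checking omitted, and ksp is assumed to be a KSP whose PC has type PCASM):
.vb
   KSP      *subksp;
   PC       pc,subpc;
   PetscInt i,nlocal,first;
   KSPSetUp(ksp);
   KSPGetPC(ksp,&pc);
   PCASMGetSubKSP(pc,&nlocal,&first,&subksp);
   for (i=0; i<nlocal; i++) {
     KSPGetPC(subksp[i],&subpc);  /* the preconditioner for the i-th local block */
     PCSetType(subpc,PCLU);       /* for example, factor each block directly */
   }
.ve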
1238 
1239    Fortran note:
1240    The output argument 'ksp' must be an array of sufficient length or PETSC_NULL_KSP. The latter can be used to learn the necessary length.
1241 
1242    Level: advanced
1243 
1244 .keywords: PC, ASM, additive Schwarz, get, sub, KSP, context
1245 
1246 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(),
1247           PCASMCreateSubdomains2D()
1248 @*/
1249 PetscErrorCode  PCASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
1250 {
1251   PetscErrorCode ierr;
1252 
1253   PetscFunctionBegin;
1254   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1255   ierr = PetscUseMethod(pc,"PCASMGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));CHKERRQ(ierr);
1256   PetscFunctionReturn(0);
1257 }
1258 
1259 /* -------------------------------------------------------------------------------------*/
1260 /*MC
1261    PCASM - Use the (restricted) additive Schwarz method, in which each block is (approximately) solved with
1262            its own KSP object.
1263 
1264    Options Database Keys:
1265 +  -pc_asm_blocks <blks> - Sets total blocks
1266 .  -pc_asm_overlap <ovl> - Sets overlap
1267 .  -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type, default is restrict
1268 -  -pc_asm_local_type [additive,multiplicative] - Sets the local composition type, default is additive
1269 
1270      IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
1271       will get a different convergence rate due to the default option of -pc_asm_type restrict. Use
1272       -pc_asm_type basic to use the standard ASM.
1273 
1274    Notes: Each processor can have one or more blocks, but a block cannot be shared by more
1275      than one processor. Use PCGASM for subdomains shared by multiple processes. Defaults to one block per processor.
1276 
1277      To set options on the solvers for each block append -sub_ to all the KSP, and PC
1278         options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
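
     A complete (illustrative) command line, using a hypothetical executable name, might be
.vb
     mpiexec -n 4 ./myapp -ksp_type gmres -pc_type asm -pc_asm_overlap 2 -sub_ksp_type preonly -sub_pc_type ilu -sub_pc_factor_levels 1
.ve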
1279 
1280      To set the options on the solvers separate for each block call PCASMGetSubKSP()
1281          and set the options directly on the resulting KSP object (you can access its PC
1282          with KSPGetPC())
1283 
1284    Level: beginner
1285 
1286    Concepts: additive Schwarz method
1287 
1288     References:
1289 +   1. - M Dryja, OB Widlund, An additive variant of the Schwarz alternating method for the case of many subregions
1290      Courant Institute, New York University Technical report
1291 -   2. - Barry Smith, Petter Bjorstad, and William Gropp, Domain Decompositions: Parallel Multilevel Methods for Elliptic Partial Differential Equations,
1292     Cambridge University Press.
1293 
1294 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
1295            PCBJACOBI, PCASMGetSubKSP(), PCASMSetLocalSubdomains(), PCASMType, PCASMGetType(), PCASMSetLocalType(), PCASMGetLocalType()
1296            PCASMSetTotalSubdomains(), PCSetModifySubmatrices(), PCASMSetOverlap(), PCASMSetType(), PCCompositeType
1297 
1298 M*/
1299 
1300 PETSC_EXTERN PetscErrorCode PCCreate_ASM(PC pc)
1301 {
1302   PetscErrorCode ierr;
1303   PC_ASM         *osm;
1304 
1305   PetscFunctionBegin;
1306   ierr = PetscNewLog(pc,&osm);CHKERRQ(ierr);
1307 
1308   osm->n                 = PETSC_DECIDE;
1309   osm->n_local           = 0;
1310   osm->n_local_true      = PETSC_DECIDE;
1311   osm->overlap           = 1;
1312   osm->ksp               = 0;
1313   osm->restriction       = 0;
1314   osm->localization      = 0;
1315   osm->prolongation      = 0;
1316   osm->lprolongation     = 0;
1317   osm->x                 = 0;
1318   osm->y                 = 0;
1319   osm->y_local           = 0;
1320   osm->is                = 0;
1321   osm->is_local          = 0;
1322   osm->mat               = 0;
1323   osm->pmat              = 0;
1324   osm->type              = PC_ASM_RESTRICT;
1325   osm->loctype           = PC_COMPOSITE_ADDITIVE;
1326   osm->same_local_solves = PETSC_TRUE;
1327   osm->sort_indices      = PETSC_TRUE;
1328   osm->dm_subdomains     = PETSC_FALSE;
1329   osm->sub_mat_type      = NULL;
1330 
1331   pc->data                 = (void*)osm;
1332   pc->ops->apply           = PCApply_ASM;
1333   pc->ops->applytranspose  = PCApplyTranspose_ASM;
1334   pc->ops->setup           = PCSetUp_ASM;
1335   pc->ops->reset           = PCReset_ASM;
1336   pc->ops->destroy         = PCDestroy_ASM;
1337   pc->ops->setfromoptions  = PCSetFromOptions_ASM;
1338   pc->ops->setuponblocks   = PCSetUpOnBlocks_ASM;
1339   pc->ops->view            = PCView_ASM;
1340   pc->ops->applyrichardson = 0;
1341 
1342   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalSubdomains_C",PCASMSetLocalSubdomains_ASM);CHKERRQ(ierr);
1343   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetTotalSubdomains_C",PCASMSetTotalSubdomains_ASM);CHKERRQ(ierr);
1344   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetOverlap_C",PCASMSetOverlap_ASM);CHKERRQ(ierr);
1345   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetType_C",PCASMSetType_ASM);CHKERRQ(ierr);
1346   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetType_C",PCASMGetType_ASM);CHKERRQ(ierr);
1347   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalType_C",PCASMSetLocalType_ASM);CHKERRQ(ierr);
1348   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetLocalType_C",PCASMGetLocalType_ASM);CHKERRQ(ierr);
1349   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSortIndices_C",PCASMSetSortIndices_ASM);CHKERRQ(ierr);
1350   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubKSP_C",PCASMGetSubKSP_ASM);CHKERRQ(ierr);
1351   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubMatType_C",PCASMGetSubMatType_ASM);CHKERRQ(ierr);
1352   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSubMatType_C",PCASMSetSubMatType_ASM);CHKERRQ(ierr);
1353   PetscFunctionReturn(0);
1354 }
1355 
1356 /*@C
1357    PCASMCreateSubdomains - Creates the index sets for the overlapping Schwarz
1358    preconditioner for any problem on a general grid.
1359 
1360    Collective
1361 
1362    Input Parameters:
1363 +  A - The global matrix operator
1364 -  n - the number of local blocks
1365 
1366    Output Parameter:
1367 .  outis - the array of index sets defining the subdomains
1368 
1369    Level: advanced
1370 
1371    Note: this generates nonoverlapping subdomains; PCASM will generate the overlap
1372     from these if you use PCASMSetLocalSubdomains().
1373 
1374     In the Fortran version you must pass in the array outis[], already allocated with length n.
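
    Sample usage (a sketch; error checking omitted, and pc is assumed to be a PC of type PCASM with operator A):
.vb
    IS *is;
    PCASMCreateSubdomains(A,4,&is);
    PCASMSetLocalSubdomains(pc,4,is,NULL);
    PCASMDestroySubdomains(4,is,NULL);  /* the PC holds its own references to the index sets */
.ve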
1375 
1376 .keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid
1377 
1378 .seealso: PCASMSetLocalSubdomains(), PCASMDestroySubdomains()
1379 @*/
1380 PetscErrorCode  PCASMCreateSubdomains(Mat A, PetscInt n, IS* outis[])
1381 {
1382   MatPartitioning mpart;
1383   const char      *prefix;
1384   void            (*f)(void);
1385   PetscInt        i,j,rstart,rend,bs;
1386   PetscBool       isbaij = PETSC_FALSE,foundpart = PETSC_FALSE;
1387   Mat             Ad     = NULL, adj;
1388   IS              ispart,isnumb,*is;
1389   PetscErrorCode  ierr;
1390 
1391   PetscFunctionBegin;
1392   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
1393   PetscValidPointer(outis,3);
1394   if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of local blocks must be > 0, n = %D",n);
1395 
1396   /* Get prefix, row distribution, and block size */
1397   ierr = MatGetOptionsPrefix(A,&prefix);CHKERRQ(ierr);
1398   ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
1399   ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
1400   if (rstart/bs*bs != rstart || rend/bs*bs != rend) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"bad row distribution [%D,%D) for matrix block size %D",rstart,rend,bs);
1401 
1402   /* Get diagonal block from matrix if possible */
1403   ierr = MatShellGetOperation(A,MATOP_GET_DIAGONAL_BLOCK,&f);CHKERRQ(ierr);
1404   if (f) {
1405     ierr = MatGetDiagonalBlock(A,&Ad);CHKERRQ(ierr);
1406   }
1407   if (Ad) {
1408     ierr = PetscObjectTypeCompare((PetscObject)Ad,MATSEQBAIJ,&isbaij);CHKERRQ(ierr);
1409     if (!isbaij) {ierr = PetscObjectTypeCompare((PetscObject)Ad,MATSEQSBAIJ,&isbaij);CHKERRQ(ierr);}
1410   }
1411   if (Ad && n > 1) {
1412     PetscBool match,done;
1413     /* Try to setup a good matrix partitioning if available */
1414     ierr = MatPartitioningCreate(PETSC_COMM_SELF,&mpart);CHKERRQ(ierr);
1415     ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
1416     ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr);
1417     ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGCURRENT,&match);CHKERRQ(ierr);
1418     if (!match) {
1419       ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGSQUARE,&match);CHKERRQ(ierr);
1420     }
1421     if (!match) { /* assume a "good" partitioner is available */
1422       PetscInt       na;
1423       const PetscInt *ia,*ja;
1424       ierr = MatGetRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
1425       if (done) {
1426         /* Build adjacency matrix by hand. Unfortunately a call to
1427            MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
1428            remove the block-aij structure and we cannot expect
1429            MatPartitioning to split vertices as we need */
1430         PetscInt       i,j,len,nnz,cnt,*iia=0,*jja=0;
1431         const PetscInt *row;
1432         nnz = 0;
1433         for (i=0; i<na; i++) { /* count number of nonzeros */
1434           len = ia[i+1] - ia[i];
1435           row = ja + ia[i];
1436           for (j=0; j<len; j++) {
1437             if (row[j] == i) { /* don't count diagonal */
1438               len--; break;
1439             }
1440           }
1441           nnz += len;
1442         }
1443         ierr   = PetscMalloc1(na+1,&iia);CHKERRQ(ierr);
1444         ierr   = PetscMalloc1(nnz,&jja);CHKERRQ(ierr);
1445         nnz    = 0;
1446         iia[0] = 0;
1447         for (i=0; i<na; i++) { /* fill adjacency */
1448           cnt = 0;
1449           len = ia[i+1] - ia[i];
1450           row = ja + ia[i];
1451           for (j=0; j<len; j++) {
1452             if (row[j] != i) { /* if not diagonal */
1453               jja[nnz+cnt++] = row[j];
1454             }
1455           }
1456           nnz     += cnt;
1457           iia[i+1] = nnz;
1458         }
1459         /* Partitioning of the adjacency matrix */
1460         ierr      = MatCreateMPIAdj(PETSC_COMM_SELF,na,na,iia,jja,NULL,&adj);CHKERRQ(ierr);
1461         ierr      = MatPartitioningSetAdjacency(mpart,adj);CHKERRQ(ierr);
1462         ierr      = MatPartitioningSetNParts(mpart,n);CHKERRQ(ierr);
1463         ierr      = MatPartitioningApply(mpart,&ispart);CHKERRQ(ierr);
1464         ierr      = ISPartitioningToNumbering(ispart,&isnumb);CHKERRQ(ierr);
1465         ierr      = MatDestroy(&adj);CHKERRQ(ierr);
1466         foundpart = PETSC_TRUE;
1467       }
1468       ierr = MatRestoreRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
1469     }
1470     ierr = MatPartitioningDestroy(&mpart);CHKERRQ(ierr);
1471   }
1472 
1473   ierr   = PetscMalloc1(n,&is);CHKERRQ(ierr);
1474   *outis = is;
1475 
1476   if (!foundpart) {
1477 
1478     /* Partitioning by contiguous chunks of rows */
1479 
1480     PetscInt mbs   = (rend-rstart)/bs;
1481     PetscInt start = rstart;
1482     for (i=0; i<n; i++) {
1483       PetscInt count = (mbs/n + ((mbs % n) > i)) * bs;
1484       ierr   = ISCreateStride(PETSC_COMM_SELF,count,start,1,&is[i]);CHKERRQ(ierr);
1485       start += count;
1486     }
1487 
1488   } else {
1489 
1490     /* Partitioning by adjacency of diagonal block  */
1491 
1492     const PetscInt *numbering;
1493     PetscInt       *count,nidx,*indices,*newidx,start=0;
1494     /* Get node count in each partition */
1495     ierr = PetscMalloc1(n,&count);CHKERRQ(ierr);
1496     ierr = ISPartitioningCount(ispart,n,count);CHKERRQ(ierr);
1497     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1498       for (i=0; i<n; i++) count[i] *= bs;
1499     }
1500     /* Build indices from node numbering */
1501     ierr = ISGetLocalSize(isnumb,&nidx);CHKERRQ(ierr);
1502     ierr = PetscMalloc1(nidx,&indices);CHKERRQ(ierr);
1503     for (i=0; i<nidx; i++) indices[i] = i; /* needs to be initialized */
1504     ierr = ISGetIndices(isnumb,&numbering);CHKERRQ(ierr);
1505     ierr = PetscSortIntWithPermutation(nidx,numbering,indices);CHKERRQ(ierr);
1506     ierr = ISRestoreIndices(isnumb,&numbering);CHKERRQ(ierr);
1507     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1508       ierr = PetscMalloc1(nidx*bs,&newidx);CHKERRQ(ierr);
1509       for (i=0; i<nidx; i++) {
1510         for (j=0; j<bs; j++) newidx[i*bs+j] = indices[i]*bs + j;
1511       }
1512       ierr    = PetscFree(indices);CHKERRQ(ierr);
1513       nidx   *= bs;
1514       indices = newidx;
1515     }
1516     /* Shift to get global indices */
1517     for (i=0; i<nidx; i++) indices[i] += rstart;
1518 
1519     /* Build the index sets for each block */
1520     for (i=0; i<n; i++) {
1521       ierr   = ISCreateGeneral(PETSC_COMM_SELF,count[i],&indices[start],PETSC_COPY_VALUES,&is[i]);CHKERRQ(ierr);
1522       ierr   = ISSort(is[i]);CHKERRQ(ierr);
1523       start += count[i];
1524     }
1525 
1526     ierr = PetscFree(count);CHKERRQ(ierr);
1527     ierr = PetscFree(indices);CHKERRQ(ierr);
1528     ierr = ISDestroy(&isnumb);CHKERRQ(ierr);
1529     ierr = ISDestroy(&ispart);CHKERRQ(ierr);
1530 
1531   }
1532   PetscFunctionReturn(0);
1533 }
1534 
1535 /*@C
1536    PCASMDestroySubdomains - Destroys the index sets created with
1537    PCASMCreateSubdomains(). Should be called after setting subdomains
1538    with PCASMSetLocalSubdomains().
1539 
1540    Collective
1541 
1542    Input Parameters:
1543 +  n - the number of index sets
1544 .  is - the array of index sets
1545 -  is_local - the array of local index sets, can be NULL
1546 
1547    Level: advanced
1548 
1549 .keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid
1550 
1551 .seealso: PCASMCreateSubdomains(), PCASMSetLocalSubdomains()
1552 @*/
1553 PetscErrorCode  PCASMDestroySubdomains(PetscInt n, IS is[], IS is_local[])
1554 {
1555   PetscInt       i;
1556   PetscErrorCode ierr;
1557 
1558   PetscFunctionBegin;
1559   if (n <= 0) PetscFunctionReturn(0);
1560   if (is) {
1561     PetscValidPointer(is,2);
1562     for (i=0; i<n; i++) { ierr = ISDestroy(&is[i]);CHKERRQ(ierr); }
1563     ierr = PetscFree(is);CHKERRQ(ierr);
1564   }
1565   if (is_local) {
1566     PetscValidPointer(is_local,3);
1567     for (i=0; i<n; i++) { ierr = ISDestroy(&is_local[i]);CHKERRQ(ierr); }
1568     ierr = PetscFree(is_local);CHKERRQ(ierr);
1569   }
1570   PetscFunctionReturn(0);
1571 }
1572 
1573 /*@
1574    PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
1575    preconditioner for a two-dimensional problem on a regular grid.
1576 
1577    Not Collective
1578 
1579    Input Parameters:
1580 +  m, n - the number of mesh points in the x and y directions
1581 .  M, N - the number of subdomains in the x and y directions
1582 .  dof - degrees of freedom per node
1583 -  overlap - overlap in mesh lines
1584 
1585    Output Parameters:
1586 +  Nsub - the number of subdomains created
1587 .  is - array of index sets defining overlapping (if overlap > 0) subdomains
1588 -  is_local - array of index sets defining non-overlapping subdomains
1589 
1590    Note:
1591    Presently PCASMCreateSubdomains2D() is valid only for sequential
1592    preconditioners.  More general related routines are
1593    PCASMSetTotalSubdomains() and PCASMSetLocalSubdomains().
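
   Sample usage (a sketch; splits a 64 x 64 grid with one degree of freedom per node into a 4 x 4 array of
   subdomains with overlap 1, for a PC pc assumed to have type PCASM):
.vb
   PetscInt Nsub;
   IS       *is,*is_local;
   PCASMCreateSubdomains2D(64,64,4,4,1,1,&Nsub,&is,&is_local);
   PCASMSetLocalSubdomains(pc,Nsub,is,is_local);
.ve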
1594 
1595    Level: advanced
1596 
1597 .keywords: PC, ASM, additive Schwarz, create, subdomains, 2D, regular grid
1598 
1599 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
1600           PCASMSetOverlap()
1601 @*/
1602 PetscErrorCode  PCASMCreateSubdomains2D(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt dof,PetscInt overlap,PetscInt *Nsub,IS **is,IS **is_local)
1603 {
1604   PetscInt       i,j,height,width,ystart,xstart,yleft,yright,xleft,xright,loc_outer;
1605   PetscErrorCode ierr;
1606   PetscInt       nidx,*idx,loc,ii,jj,count;
1607 
1608   PetscFunctionBegin;
1609   if (dof != 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Degrees of freedom per node (dof) must be 1");
1610 
1611   *Nsub     = N*M;
1612   ierr      = PetscMalloc1(*Nsub,is);CHKERRQ(ierr);
1613   ierr      = PetscMalloc1(*Nsub,is_local);CHKERRQ(ierr);
1614   ystart    = 0;
1615   loc_outer = 0;
1616   for (i=0; i<N; i++) {
1617     height = n/N + ((n % N) > i); /* height of subdomain */
1618     if (height < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many N subdomains for mesh dimension n");
1619     yleft  = ystart - overlap; if (yleft < 0) yleft = 0;
1620     yright = ystart + height + overlap; if (yright > n) yright = n;
1621     xstart = 0;
1622     for (j=0; j<M; j++) {
1623       width = m/M + ((m % M) > j); /* width of subdomain */
1624       if (width < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many M subdomains for mesh dimension m");
1625       xleft  = xstart - overlap; if (xleft < 0) xleft = 0;
1626       xright = xstart + width + overlap; if (xright > m) xright = m;
1627       nidx   = (xright - xleft)*(yright - yleft);
1628       ierr   = PetscMalloc1(nidx,&idx);CHKERRQ(ierr);
1629       loc    = 0;
1630       for (ii=yleft; ii<yright; ii++) {
1631         count = m*ii + xleft;
1632         for (jj=xleft; jj<xright; jj++) idx[loc++] = count++;
1633       }
1634       ierr = ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,PETSC_COPY_VALUES,(*is)+loc_outer);CHKERRQ(ierr);
1635       if (overlap == 0) {
1636         ierr = PetscObjectReference((PetscObject)(*is)[loc_outer]);CHKERRQ(ierr);
1637 
1638         (*is_local)[loc_outer] = (*is)[loc_outer];
1639       } else {
1640         for (loc=0,ii=ystart; ii<ystart+height; ii++) {
1641           for (jj=xstart; jj<xstart+width; jj++) {
1642             idx[loc++] = m*ii + jj;
1643           }
1644         }
1645         ierr = ISCreateGeneral(PETSC_COMM_SELF,loc,idx,PETSC_COPY_VALUES,*is_local+loc_outer);CHKERRQ(ierr);
1646       }
1647       ierr    = PetscFree(idx);CHKERRQ(ierr);
1648       xstart += width;
1649       loc_outer++;
1650     }
1651     ystart += height;
1652   }
1653   for (i=0; i<*Nsub; i++) { ierr = ISSort((*is)[i]);CHKERRQ(ierr); }
1654   PetscFunctionReturn(0);
1655 }
1656 
1657 /*@C
1658     PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
1659     only) for the additive Schwarz preconditioner.
1660 
1661     Not Collective
1662 
1663     Input Parameter:
1664 .   pc - the preconditioner context
1665 
1666     Output Parameters:
1667 +   n - the number of subdomains for this processor (default value = 1)
1668 .   is - the index sets that define the subdomains for this processor
1669 -   is_local - the index sets that define the local part of the subdomains for this processor (can be NULL)
1670 
1671 
1673     The IS numbering is in the parallel, global numbering of the vector.
1674 
1675     Level: advanced
1676 
1677 .keywords: PC, ASM, set, local, subdomains, additive Schwarz
1678 
1679 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1680           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubmatrices()
1681 @*/
1682 PetscErrorCode  PCASMGetLocalSubdomains(PC pc,PetscInt *n,IS *is[],IS *is_local[])
1683 {
1684   PC_ASM         *osm = (PC_ASM*)pc->data;
1685   PetscErrorCode ierr;
1686   PetscBool      match;
1687 
1688   PetscFunctionBegin;
1689   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1690   PetscValidIntPointer(n,2);
1691   if (is) PetscValidPointer(is,3);
1692   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1693   if (!match) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"PC is not a PCASM");
1694   if (n) *n = osm->n_local_true;
1695   if (is) *is = osm->is;
1696   if (is_local) *is_local = osm->is_local;
1697   PetscFunctionReturn(0);
1698 }
1699 
1700 /*@C
1701     PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1702     only) for the additive Schwarz preconditioner.
1703 
1704     Not Collective
1705 
1706     Input Parameter:
1707 .   pc - the preconditioner context
1708 
1709     Output Parameters:
1710 +   n - the number of matrices for this processor (default value = 1)
1711 -   mat - the matrices
1712 
1713     Level: advanced
1714 
1715     Notes: Call after PCSetUp() (or KSPSetUp()) but before PCApply() (or KSPSolve()) and before PCSetUpOnBlocks()
1716 
1717            Usually one would use PCSetModifySubmatrices() to change the submatrices in building the preconditioner.
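
    Sample usage (an illustrative sketch; error checking omitted, and ksp is assumed to be a KSP whose PC has type PCASM):
.vb
    PetscInt i,n;
    Mat      *mats;
    PC       pc;
    KSPSetUp(ksp);
    KSPGetPC(ksp,&pc);
    PCASMGetLocalSubmatrices(pc,&n,&mats);
    for (i=0; i<n; i++) {
      MatView(mats[i],PETSC_VIEWER_STDOUT_SELF);  /* inspect each local submatrix */
    }
.ve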
1718 
1719 .keywords: PC, ASM, set, local, subdomains, additive Schwarz, block Jacobi
1720 
1721 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1722           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains(), PCSetModifySubmatrices()
1723 @*/
1724 PetscErrorCode  PCASMGetLocalSubmatrices(PC pc,PetscInt *n,Mat *mat[])
1725 {
1726   PC_ASM         *osm;
1727   PetscErrorCode ierr;
1728   PetscBool      match;
1729 
1730   PetscFunctionBegin;
1731   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1732   PetscValidIntPointer(n,2);
1733   if (mat) PetscValidPointer(mat,3);
1734   if (!pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUp() or PCSetUp().");
1735   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1736   if (!match) {
1737     if (n) *n = 0;
1738     if (mat) *mat = NULL;
1739   } else {
1740     osm = (PC_ASM*)pc->data;
1741     if (n) *n = osm->n_local_true;
1742     if (mat) *mat = osm->pmat;
1743   }
1744   PetscFunctionReturn(0);
1745 }
1746 
1747 /*@
1748     PCASMSetDMSubdomains - Indicates whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.
1749 
1750     Logically Collective
1751 
1752     Input Parameters:
1753 +   pc  - the preconditioner
1754 -   flg - boolean indicating whether to use subdomains defined by the DM
1755 
1756     Options Database Key:
1757 .   -pc_asm_dm_subdomains <bool> - use subdomains defined by the DM
1758 
1759     Level: intermediate
1760 
1761     Notes:
1762     PCASMSetTotalSubdomains() and PCASMSetOverlap() take precedence over PCASMSetDMSubdomains(),
1763     so setting either of the first two effectively turns the latter off.
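
    Sample usage (a sketch; pc is assumed to have type PCASM and a DM attached):
.vb
    PCASMSetDMSubdomains(pc,PETSC_TRUE);  /* let the attached DM define the subdomains */
.ve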
1764 
1765 .keywords: PC, ASM, DM, set, subdomains, additive Schwarz
1766 
1767 .seealso: PCASMGetDMSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap()
1768           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1769 @*/
1770 PetscErrorCode  PCASMSetDMSubdomains(PC pc,PetscBool flg)
1771 {
1772   PC_ASM         *osm = (PC_ASM*)pc->data;
1773   PetscErrorCode ierr;
1774   PetscBool      match;
1775 
1776   PetscFunctionBegin;
1777   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1778   PetscValidLogicalCollectiveBool(pc,flg,2);
1779   if (pc->setupcalled) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Cannot be called after PCSetUp() has been called.");
1780   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1781   if (match) {
1782     osm->dm_subdomains = flg;
1783   }
1784   PetscFunctionReturn(0);
1785 }
1786 
1787 /*@
1788     PCASMGetDMSubdomains - Returns a flag indicating whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.

1789     Not Collective
1790 
1791     Input Parameter:
1792 .   pc  - the preconditioner
1793 
1794     Output Parameter:
1795 .   flg - boolean indicating whether to use subdomains defined by the DM
1796 
1797     Level: intermediate
1798 
1799 .keywords: PC, ASM, DM, set, subdomains, additive Schwarz
1800 
1801 .seealso: PCASMSetDMSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap()
1802           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1803 @*/
1804 PetscErrorCode  PCASMGetDMSubdomains(PC pc,PetscBool* flg)
1805 {
1806   PC_ASM         *osm = (PC_ASM*)pc->data;
1807   PetscErrorCode ierr;
1808   PetscBool      match;
1809 
1810   PetscFunctionBegin;
1811   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1812   PetscValidPointer(flg,2);
1813   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1814   if (match) {
1815     if (flg) *flg = osm->dm_subdomains;
1816   }
1817   PetscFunctionReturn(0);
1818 }
1819 
1820 /*@
1821      PCASMGetSubMatType - Gets the matrix type used for ASM subsolves, as a string.
1822 
1823    Not Collective
1824 
1825    Input Parameter:
1826 .  pc - the PC
1827 
1828    Output Parameter:
1829 .  sub_mat_type - name of the matrix type used for the subdomain solves
1830 
1831    Level: advanced
1832 
1833 .keywords: PC, PCASM, MatType, set
1834 
1835 .seealso: PCASMSetSubMatType(), PCASM, PCSetType(), VecSetType(), MatType, Mat
1836 @*/
1837 PetscErrorCode  PCASMGetSubMatType(PC pc,MatType *sub_mat_type)
1838 {
1839   PetscErrorCode ierr;
1840 
1841   PetscFunctionBegin;
1842   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1843   ierr = PetscTryMethod(pc,"PCASMGetSubMatType_C",(PC,MatType*),(pc,sub_mat_type));CHKERRQ(ierr);
1844   PetscFunctionReturn(0);
1845 }
1843 
1844 /*@
1845      PCASMSetSubMatType - Sets the type of matrix used for ASM subsolves
1846 
1847    Collective on PC
1848 
1849    Input Parameters:
1850 +  pc             - the PC object
1851 -  sub_mat_type   - matrix type
1852 
1853    Options Database Key:
1854 .  -pc_asm_sub_mat_type  <sub_mat_type> - Sets the matrix type used for subsolves, for example, seqaijviennacl. If you specify a base name like aijviennacl, the corresponding sequential type is assumed.
1855 
1856    Notes:
1857    See "${PETSC_DIR}/include/petscmat.h" for available types
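
   A minimal sketch (the ViennaCL type is only an example; any compatible matrix type may be used):
.vb
   PCASMSetSubMatType(pc,MATSEQAIJVIENNACL);
.ve
   or, equivalently, on the command line: -pc_asm_sub_mat_type seqaijviennacl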
1858 
1859   Level: advanced
1860 
1861 .keywords: PC, PCASM, MatType, set
1862 
1863 .seealso: PCASMGetSubMatType(), PCASM, PCSetType(), VecSetType(), MatType, Mat
1864 @*/
1865 PetscErrorCode PCASMSetSubMatType(PC pc,MatType sub_mat_type)
1866 {
1867   PetscErrorCode ierr;
1868 
1869   PetscFunctionBegin;
1870   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1871   ierr = PetscTryMethod(pc,"PCASMSetSubMatType_C",(PC,MatType),(pc,sub_mat_type));CHKERRQ(ierr);
1872   PetscFunctionReturn(0);
1873 }
1872