
/*
  This file defines an additive Schwarz preconditioner for any Mat implementation.

  Note that each processor may have any number of subdomains. But in order to
  deal easily with the VecScatter(), we treat each processor as if it has the
  same number of subdomains.

       n - total number of true subdomains on all processors
       n_local_true - actual number of subdomains on this processor
       n_local = maximum over all processors of n_local_true
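
       For example (illustrative numbers only): with three processes owning
       2, 1, and 3 subdomains respectively, n = 6, n_local_true = 2, 1, and 3,
       and n_local = 3; processes with fewer true subdomains participate in
       the collective scatters with empty index sets.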
*/
#include <petsc/private/pcimpl.h>     /*I "petscpc.h" I*/
#include <petscdm.h>

typedef struct {
  PetscInt   n, n_local, n_local_true;
  PetscInt   overlap;             /* overlap requested by user */
  KSP        *ksp;                /* linear solvers for each block */
  VecScatter *restriction;        /* mapping from global to subregion */
  VecScatter *localization;       /* mapping from overlapping to non-overlapping subregion; used for restrict version of algorithms */
  VecScatter *prolongation;       /* mapping from subregion to global */
  Vec        *x,*y,*y_local;      /* work vectors */
  IS         *is;                 /* index set that defines each overlapping subdomain */
  IS         *is_local;           /* index set that defines each non-overlapping subdomain, may be NULL */
  Mat        *mat,*pmat;          /* mat is not currently used */
  PCASMType  type;                /* use reduced interpolation, restriction or both */
  PetscBool  type_set;            /* if user set this value (so won't change it for symmetric problems) */
  PetscBool  same_local_solves;   /* flag indicating whether all local solvers are the same */
  PetscBool  sort_indices;        /* flag to sort subdomain indices */
  PetscBool  dm_subdomains;       /* whether DM is allowed to define subdomains */
  PCCompositeType loctype;        /* the type of composition for local solves */
  MatType    sub_mat_type;        /* the type of Mat used for subdomain solves (can be MATSAME or NULL) */
  /* For multiplicative solve */
  Mat       *lmats;               /* submatrices for overlapping multiplicative (process) subdomain */
  Vec        lx, ly;              /* work vectors */
  IS         lis;                 /* index set that defines each overlapping multiplicative (process) subdomain */
  VecScatter *lprolongation;      /* mapping from subregion to overlapping process subdomain */
} PC_ASM;

static PetscErrorCode PCView_ASM(PC pc,PetscViewer viewer)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  PetscInt       i,bsz;
  PetscBool      iascii,isstring;
  PetscViewer    sviewer;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    char overlaps[256] = "user-defined overlap",blocks[256] = "total subdomain blocks not yet set";
    if (osm->overlap >= 0) {ierr = PetscSNPrintf(overlaps,sizeof(overlaps),"amount of overlap = %D",osm->overlap);CHKERRQ(ierr);}
    if (osm->n > 0) {ierr = PetscSNPrintf(blocks,sizeof(blocks),"total subdomain blocks = %D",osm->n);CHKERRQ(ierr);}
    ierr = PetscViewerASCIIPrintf(viewer,"  %s, %s\n",blocks,overlaps);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer,"  restriction/interpolation type - %s\n",PCASMTypes[osm->type]);CHKERRQ(ierr);
    if (osm->dm_subdomains) {ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: using DM to define subdomains\n");CHKERRQ(ierr);}
    if (osm->loctype != PC_COMPOSITE_ADDITIVE) {ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: local solve composition type - %s\n",PCCompositeTypes[osm->loctype]);CHKERRQ(ierr);}
    ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
    if (osm->same_local_solves) {
      if (osm->ksp) {
        ierr = PetscViewerASCIIPrintf(viewer,"  Local solve is same for all blocks, in the following KSP and PC objects:\n");CHKERRQ(ierr);
        ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
        if (!rank) {
          ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
          ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);
          ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
        }
        ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      }
    } else {
      ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"  [%d] number of local blocks = %D\n",(int)rank,osm->n_local_true);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"  Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
      ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      for (i=0; i<osm->n_local_true; i++) {
        ierr = ISGetLocalSize(osm->is[i],&bsz);CHKERRQ(ierr);
        ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"[%d] local block number %D, size = %D\n",(int)rank,i,bsz);CHKERRQ(ierr);
        ierr = KSPView(osm->ksp[i],sviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
      }
      ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    ierr = PetscViewerStringSPrintf(viewer," blocks=%D, overlap=%D, type=%s",osm->n,osm->overlap,PCASMTypes[osm->type]);CHKERRQ(ierr);
    ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
    if (osm->ksp) {ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);}
    ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode PCASMPrintSubdomains(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  const char     *prefix;
  char           fname[PETSC_MAX_PATH_LEN+1];
  PetscViewer    viewer, sviewer;
  char           *s;
  PetscInt       i,j,nidx;
  const PetscInt *idx;
  PetscMPIInt    rank, size;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank);CHKERRQ(ierr);
  ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
  ierr = PetscOptionsGetString(NULL,prefix,"-pc_asm_print_subdomains",fname,PETSC_MAX_PATH_LEN,NULL);CHKERRQ(ierr);
  if (fname[0] == 0) {ierr = PetscStrcpy(fname,"stdout");CHKERRQ(ierr);}
  ierr = PetscViewerASCIIOpen(PetscObjectComm((PetscObject)pc),fname,&viewer);CHKERRQ(ierr);
  for (i=0; i<osm->n_local; i++) {
    if (i < osm->n_local_true) {
      ierr = ISGetLocalSize(osm->is[i],&nidx);CHKERRQ(ierr);
      ierr = ISGetIndices(osm->is[i],&idx);CHKERRQ(ierr);
      /* Print to a string viewer; allow 16 characters per index (digits plus a separating space) plus 512 for the header. */
      ierr = PetscMalloc1(16*(nidx+1)+512, &s);CHKERRQ(ierr);
      ierr = PetscViewerStringOpen(PETSC_COMM_SELF, s, 16*(nidx+1)+512, &sviewer);CHKERRQ(ierr);
      ierr = PetscViewerStringSPrintf(sviewer, "[%d:%d] Subdomain %D with overlap:\n", (int)rank, (int)size, i);CHKERRQ(ierr);
      for (j=0; j<nidx; j++) {
        ierr = PetscViewerStringSPrintf(sviewer,"%D ",idx[j]);CHKERRQ(ierr);
      }
      ierr = ISRestoreIndices(osm->is[i],&idx);CHKERRQ(ierr);
      ierr = PetscViewerStringSPrintf(sviewer,"\n");CHKERRQ(ierr);
      ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer, "%s", s);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
      ierr = PetscFree(s);CHKERRQ(ierr);
      if (osm->is_local) {
        /* Print to a string viewer; allow 16 characters per index (digits plus a separating space) plus 512 for the header. */
        ierr = PetscMalloc1(16*(nidx+1)+512, &s);CHKERRQ(ierr);
        ierr = PetscViewerStringOpen(PETSC_COMM_SELF, s, 16*(nidx+1)+512, &sviewer);CHKERRQ(ierr);
        ierr = PetscViewerStringSPrintf(sviewer, "[%d:%d] Subdomain %D without overlap:\n", (int)rank, (int)size, i);CHKERRQ(ierr);
        ierr = ISGetLocalSize(osm->is_local[i],&nidx);CHKERRQ(ierr);
        ierr = ISGetIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
        for (j=0; j<nidx; j++) {
          ierr = PetscViewerStringSPrintf(sviewer,"%D ",idx[j]);CHKERRQ(ierr);
        }
        ierr = ISRestoreIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
        ierr = PetscViewerStringSPrintf(sviewer,"\n");CHKERRQ(ierr);
        ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIISynchronizedPrintf(viewer, "%s", s);CHKERRQ(ierr);
        ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
        ierr = PetscFree(s);CHKERRQ(ierr);
      }
    } else {
      /* Participate in collective viewer calls. */
      ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
      /* Assume either all ranks have is_local or none do. */
      if (osm->is_local) {
        ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
        ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
      }
    }
  }
  ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
  ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCSetUp_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscBool      symset,flg;
  PetscInt       i,m,m_local;
  MatReuse       scall = MAT_REUSE_MATRIX;
  IS             isl;
  KSP            ksp;
  PC             subpc;
  const char     *prefix,*pprefix;
  Vec            vec;
  DM             *domain_dm = NULL;

  PetscFunctionBegin;
  if (!pc->setupcalled) {

    if (!osm->type_set) {
      ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
      if (symset && flg) osm->type = PC_ASM_BASIC;
    }

    /* Note: if subdomains have been set either via PCASMSetTotalSubdomains() or via PCASMSetLocalSubdomains(), osm->n_local_true will not be PETSC_DECIDE */
    if (osm->n_local_true == PETSC_DECIDE) {
      /* no subdomains given */
      /* try pc->dm first, if allowed */
      if (osm->dm_subdomains && pc->dm) {
        PetscInt  num_domains, d;
        char      **domain_names;
        IS        *inner_domain_is, *outer_domain_is;
        ierr = DMCreateDomainDecomposition(pc->dm, &num_domains, &domain_names, &inner_domain_is, &outer_domain_is, &domain_dm);CHKERRQ(ierr);
        osm->overlap = -1; /* We do not want to increase the overlap of the IS.
                              A future improvement of this code might allow one to use
                              DM-defined subdomains and also increase the overlap,
                              but that is not currently supported */
        if (num_domains) {
          ierr = PCASMSetLocalSubdomains(pc, num_domains, outer_domain_is, inner_domain_is);CHKERRQ(ierr);
        }
        for (d = 0; d < num_domains; ++d) {
          if (domain_names)    {ierr = PetscFree(domain_names[d]);CHKERRQ(ierr);}
          if (inner_domain_is) {ierr = ISDestroy(&inner_domain_is[d]);CHKERRQ(ierr);}
          if (outer_domain_is) {ierr = ISDestroy(&outer_domain_is[d]);CHKERRQ(ierr);}
        }
        ierr = PetscFree(domain_names);CHKERRQ(ierr);
        ierr = PetscFree(inner_domain_is);CHKERRQ(ierr);
        ierr = PetscFree(outer_domain_is);CHKERRQ(ierr);
      }
      if (osm->n_local_true == PETSC_DECIDE) {
        /* still no subdomains; use one subdomain per processor */
        osm->n_local_true = 1;
      }
    }
    { /* determine the global and max number of subdomains */
      struct {PetscInt max,sum;} inwork,outwork;
      PetscMPIInt size;

      inwork.max   = osm->n_local_true;
      inwork.sum   = osm->n_local_true;
      ierr         = MPIU_Allreduce(&inwork,&outwork,1,MPIU_2INT,MPIU_MAXSUM_OP,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
      osm->n_local = outwork.max;
      osm->n       = outwork.sum;

      ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);
      if (outwork.max == 1 && outwork.sum == size) {
        /* osm->n_local_true = 1 on all processes; setting this option may enable an optimized MatCreateSubMatrices() implementation */
        ierr = MatSetOption(pc->pmat,MAT_SUBMAT_SINGLEIS,PETSC_TRUE);CHKERRQ(ierr);
      }
    }
    if (!osm->is) { /* create the index sets */
      ierr = PCASMCreateSubdomains(pc->pmat,osm->n_local_true,&osm->is);CHKERRQ(ierr);
    }
    if (osm->n_local_true > 1 && !osm->is_local) {
      ierr = PetscMalloc1(osm->n_local_true,&osm->is_local);CHKERRQ(ierr);
      for (i=0; i<osm->n_local_true; i++) {
        if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
          ierr = ISDuplicate(osm->is[i],&osm->is_local[i]);CHKERRQ(ierr);
          ierr = ISCopy(osm->is[i],osm->is_local[i]);CHKERRQ(ierr);
        } else {
          ierr             = PetscObjectReference((PetscObject)osm->is[i]);CHKERRQ(ierr);
          osm->is_local[i] = osm->is[i];
        }
      }
    }
    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    flg  = PETSC_FALSE;
    ierr = PetscOptionsGetBool(NULL,prefix,"-pc_asm_print_subdomains",&flg,NULL);CHKERRQ(ierr);
    if (flg) { ierr = PCASMPrintSubdomains(pc);CHKERRQ(ierr); }

    if (osm->overlap > 0) {
      /* Extend the "overlapping" regions by a number of steps */
      ierr = MatIncreaseOverlap(pc->pmat,osm->n_local_true,osm->is,osm->overlap);CHKERRQ(ierr);
    }
    if (osm->sort_indices) {
      for (i=0; i<osm->n_local_true; i++) {
        ierr = ISSort(osm->is[i]);CHKERRQ(ierr);
        if (osm->is_local) {
          ierr = ISSort(osm->is_local[i]);CHKERRQ(ierr);
        }
      }
    }

    if (!osm->ksp) {
      /* Create the local solvers */
      ierr = PetscMalloc1(osm->n_local_true,&osm->ksp);CHKERRQ(ierr);
      if (domain_dm) {
        ierr = PetscInfo(pc,"Setting up ASM subproblems using the embedded DM\n");CHKERRQ(ierr);
      }
      for (i=0; i<osm->n_local_true; i++) {
        ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
        ierr = KSPSetErrorIfNotConverged(ksp,pc->erroriffailure);CHKERRQ(ierr);
        ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)ksp);CHKERRQ(ierr);
        ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
        ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
        ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
        ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
        ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
        ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
        if (domain_dm) {
          ierr = KSPSetDM(ksp, domain_dm[i]);CHKERRQ(ierr);
          ierr = KSPSetDMActive(ksp, PETSC_FALSE);CHKERRQ(ierr);
          ierr = DMDestroy(&domain_dm[i]);CHKERRQ(ierr);
        }
        osm->ksp[i] = ksp;
      }
      if (domain_dm) {
        ierr = PetscFree(domain_dm);CHKERRQ(ierr);
      }
    }
    if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
      ierr = ISConcatenate(PETSC_COMM_SELF, osm->n_local_true, osm->is, &osm->lis);CHKERRQ(ierr);
      ierr = ISSortRemoveDups(osm->lis);CHKERRQ(ierr);
      ierr = ISGetLocalSize(osm->lis, &m);CHKERRQ(ierr);
      ierr = VecCreateSeq(PETSC_COMM_SELF, m, &osm->lx);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->lx, &osm->ly);CHKERRQ(ierr);
    }
    scall = MAT_INITIAL_MATRIX;
  } else {
    /*
       Destroy the blocks from the previous iteration
    */
    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
      scall = MAT_INITIAL_MATRIX;
    }
  }

  /*
     Extract out the submatrices
  */
  ierr = MatCreateSubMatrices(pc->pmat,osm->n_local_true,osm->is,osm->is,scall,&osm->pmat);CHKERRQ(ierr);
  if (scall == MAT_INITIAL_MATRIX) {
    ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
    for (i=0; i<osm->n_local_true; i++) {
      ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)osm->pmat[i]);CHKERRQ(ierr);
      ierr = PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);CHKERRQ(ierr);
    }
  }

  /* Convert the types of the submatrices (if need be) */
  if (osm->sub_mat_type) {
    for (i=0; i<osm->n_local_true; i++) {
      ierr = MatConvert(osm->pmat[i],osm->sub_mat_type,MAT_INPLACE_MATRIX,&(osm->pmat[i]));CHKERRQ(ierr);
    }
  }

  if (!pc->setupcalled) {
    /* Create the local work vectors (from the local matrices) and scatter contexts */
    ierr = MatCreateVecs(pc->pmat,&vec,NULL);CHKERRQ(ierr);
    ierr = PetscMalloc1(osm->n_local,&osm->restriction);CHKERRQ(ierr);
    if (osm->is_local && (osm->type == PC_ASM_INTERPOLATE || osm->type == PC_ASM_NONE)) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot use interpolate or none PCASMType if is_local was provided to PCASMSetLocalSubdomains()");
    if (osm->is_local && osm->type == PC_ASM_RESTRICT) {ierr = PetscMalloc1(osm->n_local,&osm->localization);CHKERRQ(ierr);}
    ierr = PetscMalloc1(osm->n_local,&osm->prolongation);CHKERRQ(ierr);
    if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
      ierr = PetscMalloc1(osm->n_local,&osm->lprolongation);CHKERRQ(ierr);
    }

    ierr = PetscMalloc1(osm->n_local,&osm->x);CHKERRQ(ierr);
    ierr = PetscMalloc1(osm->n_local,&osm->y);CHKERRQ(ierr);
    ierr = PetscMalloc1(osm->n_local,&osm->y_local);CHKERRQ(ierr);
    for (i=0; i<osm->n_local_true; ++i) {
      ierr = ISGetLocalSize(osm->is[i],&m);CHKERRQ(ierr);
      ierr = MatCreateVecs(osm->pmat[i],&osm->x[i],NULL);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,osm->is[i],osm->x[i],isl,&osm->restriction[i]);CHKERRQ(ierr);
      ierr = ISDestroy(&isl);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
      if (osm->localization) {
        ISLocalToGlobalMapping ltog;
        IS                     isll;
        const PetscInt         *idx_local;
        PetscInt               *idx,nout;

        ierr = ISLocalToGlobalMappingCreateIS(osm->is[i],&ltog);CHKERRQ(ierr);
        ierr = ISGetLocalSize(osm->is_local[i],&m_local);CHKERRQ(ierr);
        ierr = ISGetIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr);
        ierr = PetscMalloc1(m_local,&idx);CHKERRQ(ierr);
        ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m_local,idx_local,&nout,idx);CHKERRQ(ierr);
        ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
        if (nout != m_local) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"is_local not a subset of is");
        ierr = ISRestoreIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr);
        ierr = ISCreateGeneral(PETSC_COMM_SELF,m_local,idx,PETSC_OWN_POINTER,&isll);CHKERRQ(ierr);
        ierr = ISCreateStride(PETSC_COMM_SELF,m_local,0,1,&isl);CHKERRQ(ierr);
        ierr = VecCreateSeq(PETSC_COMM_SELF,m_local,&osm->y_local[i]);CHKERRQ(ierr);
        ierr = VecScatterCreate(osm->y[i],isll,osm->y_local[i],isl,&osm->localization[i]);CHKERRQ(ierr);
        ierr = ISDestroy(&isll);CHKERRQ(ierr);

        ierr = VecScatterCreate(vec,osm->is_local[i],osm->y_local[i],isl,&osm->prolongation[i]);CHKERRQ(ierr);
        ierr = ISDestroy(&isl);CHKERRQ(ierr);
      } else {
        osm->y_local[i] = osm->y[i];
        ierr = PetscObjectReference((PetscObject) osm->y[i]);CHKERRQ(ierr);
        osm->prolongation[i] = osm->restriction[i];
        ierr = PetscObjectReference((PetscObject) osm->restriction[i]);CHKERRQ(ierr);
      }

      if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE && i < osm->n_local_true - 1) {
        ISLocalToGlobalMapping ltog;
        IS                     isll;
        const PetscInt         *idx_is;
        PetscInt               *idx_lis,nout;

        ierr = ISLocalToGlobalMappingCreateIS(osm->lis,&ltog);CHKERRQ(ierr);
        ierr = ISGetLocalSize(osm->is[i],&m);CHKERRQ(ierr);
        ierr = ISGetIndices(osm->is[i], &idx_is);CHKERRQ(ierr);
        ierr = PetscMalloc1(m,&idx_lis);CHKERRQ(ierr);
        ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m,idx_is,&nout,idx_lis);CHKERRQ(ierr);
        ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
        if (nout != m) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"is not a subset of lis");
        ierr = ISRestoreIndices(osm->is[i], &idx_is);CHKERRQ(ierr);
        ierr = ISCreateGeneral(PETSC_COMM_SELF,m,idx_lis,PETSC_OWN_POINTER,&isll);CHKERRQ(ierr);
        ierr = ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);CHKERRQ(ierr);
        ierr = VecScatterCreate(osm->ly,isll,osm->y[i],isl,&osm->lprolongation[i]);CHKERRQ(ierr);
        ierr = ISDestroy(&isll);CHKERRQ(ierr);
        ierr = ISDestroy(&isl);CHKERRQ(ierr);
      }

    }
    for (i=osm->n_local_true; i<osm->n_local; i++) {
      ierr = VecCreateSeq(PETSC_COMM_SELF,0,&osm->x[i]);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y_local[i]);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&isl);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,isl,osm->x[i],isl,&osm->restriction[i]);CHKERRQ(ierr);
      if (osm->localization) {
        ierr = VecScatterCreate(osm->y[i],isl,osm->y_local[i],isl,&osm->localization[i]);CHKERRQ(ierr);
        ierr = VecScatterCreate(vec,isl,osm->x[i],isl,&osm->prolongation[i]);CHKERRQ(ierr);
      } else {
        osm->prolongation[i] = osm->restriction[i];
        ierr                 = PetscObjectReference((PetscObject) osm->restriction[i]);CHKERRQ(ierr);
      }
      ierr = ISDestroy(&isl);CHKERRQ(ierr);
    }
    ierr = VecDestroy(&vec);CHKERRQ(ierr);
  }

  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
    IS      *cis;
    PetscInt c;

    ierr = PetscMalloc1(osm->n_local_true, &cis);CHKERRQ(ierr);
    for (c = 0; c < osm->n_local_true; ++c) cis[c] = osm->lis;
    ierr = MatCreateSubMatrices(pc->pmat, osm->n_local_true, osm->is, cis, scall, &osm->lmats);CHKERRQ(ierr);
    ierr = PetscFree(cis);CHKERRQ(ierr);
  }

  /* Return control to the user so that the submatrices can be modified (e.g., to apply
     different boundary conditions for the submatrices than for the global problem) */
  ierr = PCModifySubMatrices(pc,osm->n_local_true,osm->is,osm->is,osm->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);

  /*
     Loop over the subdomains, setting the operators on the local solvers
  */
  for (i=0; i<osm->n_local_true; i++) {
    ierr = KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i]);CHKERRQ(ierr);
    if (!pc->setupcalled) {
      ierr = KSPSetFromOptions(osm->ksp[i]);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc)
{
  PC_ASM             *osm = (PC_ASM*)pc->data;
  PetscErrorCode     ierr;
  PetscInt           i;
  KSPConvergedReason reason;

  PetscFunctionBegin;
  for (i=0; i<osm->n_local_true; i++) {
    ierr = KSPSetUp(osm->ksp[i]);CHKERRQ(ierr);
    ierr = KSPGetConvergedReason(osm->ksp[i],&reason);CHKERRQ(ierr);
    if (reason == KSP_DIVERGED_PCSETUP_FAILED) {
      pc->failedreason = PC_SUBPC_ERROR;
    }
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode PCApply_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local = osm->n_local,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).
  */
  if (!(osm->type & PC_ASM_RESTRICT)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local_true; i++) {
      ierr = VecZeroEntries(osm->x[i]);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_INTERPOLATE)) reverse = SCATTER_REVERSE_LOCAL;

  switch (osm->loctype) {
  case PC_COMPOSITE_ADDITIVE:
    for (i=0; i<n_local; i++) {
      ierr = VecScatterBegin(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    }
    ierr = VecZeroEntries(y);CHKERRQ(ierr);
    /* do the local solves */
    for (i=0; i<n_local_true; i++) {
      ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
      ierr = KSPSolve(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
      if (osm->localization) {
        ierr = VecScatterBegin(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
      }
      ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
    }
    /* handle the rest of the scatters that do not have local solves */
    for (i=n_local_true; i<n_local; i++) {
      ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
      ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
    }
    for (i=0; i<n_local; i++) {
      ierr = VecScatterEnd(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
    }
    break;
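  /*
     Multiplicative composition over the local blocks: after each block solve,
     the corrections accumulated so far (gathered into osm->ly over the union
     index set osm->lis) are multiplied by the coupling submatrix
     osm->lmats[i+1] and subtracted from the next block's right-hand side, so
     that block i+1 solves against the residual left by blocks 0..i.
  */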
  case PC_COMPOSITE_MULTIPLICATIVE:
    ierr = VecZeroEntries(y);CHKERRQ(ierr);
    ierr = VecSet(osm->ly, 0.0);CHKERRQ(ierr);
    /* do the local solves */
    for (i = 0; i < n_local_true; ++i) {
      if (i == 0) {
        ierr = VecZeroEntries(osm->x[i]);CHKERRQ(ierr);
      }
      ierr = VecScatterBegin(osm->restriction[i], x, osm->x[i], ADD_VALUES, forward);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->restriction[i], x, osm->x[i], ADD_VALUES, forward);CHKERRQ(ierr);
      ierr = KSPSolve(osm->ksp[i], osm->x[i], osm->y[i]);CHKERRQ(ierr);
      if (osm->localization) {
        ierr = VecScatterBegin(osm->localization[i], osm->y[i], osm->y_local[i], INSERT_VALUES, forward);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->localization[i], osm->y[i], osm->y_local[i], INSERT_VALUES, forward);CHKERRQ(ierr);
      }
      ierr = VecScatterBegin(osm->prolongation[i], osm->y_local[i], y, ADD_VALUES, reverse);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->prolongation[i], osm->y_local[i], y, ADD_VALUES, reverse);CHKERRQ(ierr);
      if (i < n_local_true-1) {
        ierr = VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, reverse);CHKERRQ(ierr);
        ierr = VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, reverse);CHKERRQ(ierr);
        ierr = VecCopy(osm->ly, osm->lx);CHKERRQ(ierr);
        ierr = VecScale(osm->lx, -1.0);CHKERRQ(ierr);
        ierr = MatMult(osm->lmats[i+1], osm->lx, osm->x[i+1]);CHKERRQ(ierr);
      }
    }
    /* handle the rest of the scatters that do not have local solves */
    for (i = n_local_true; i < n_local; ++i) {
      ierr = VecScatterBegin(osm->restriction[i], x, osm->x[i], INSERT_VALUES, forward);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->restriction[i], x, osm->x[i], INSERT_VALUES, forward);CHKERRQ(ierr);
      ierr = VecScatterBegin(osm->prolongation[i], osm->y_local[i], y, ADD_VALUES, reverse);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->prolongation[i], osm->y_local[i], y, ADD_VALUES, reverse);CHKERRQ(ierr);
    }
    break;
  default: SETERRQ1(PetscObjectComm((PetscObject) pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode PCApplyTranspose_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local = osm->n_local,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).

     Note: these are reversed from PCApply_ASM() because we are applying the
     transpose of the three terms
  */
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local_true; i++) {
      ierr = VecZeroEntries(osm->x[i]);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_RESTRICT)) reverse = SCATTER_REVERSE_LOCAL;

  for (i=0; i<n_local; i++) {
    ierr = VecScatterBegin(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
  }
  ierr = VecZeroEntries(y);CHKERRQ(ierr);
  /* do the local solves */
  for (i=0; i<n_local_true; i++) {
    ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = KSPSolveTranspose(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
    if (osm->localization) {
      ierr = VecScatterBegin(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    }
    ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  /* handle the rest of the scatters that do not have local solves */
  for (i=n_local_true; i<n_local; i++) {
    ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  for (i=0; i<n_local; i++) {
    ierr = VecScatterEnd(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode PCReset_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  if (osm->ksp) {
    for (i=0; i<osm->n_local_true; i++) {
      ierr = KSPReset(osm->ksp[i]);CHKERRQ(ierr);
    }
  }
  if (osm->pmat) {
    if (osm->n_local_true > 0) {
      ierr = MatDestroySubMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
    }
  }
  if (osm->restriction) {
    for (i=0; i<osm->n_local; i++) {
      ierr = VecScatterDestroy(&osm->restriction[i]);CHKERRQ(ierr);
      if (osm->localization) {ierr = VecScatterDestroy(&osm->localization[i]);CHKERRQ(ierr);}
      ierr = VecScatterDestroy(&osm->prolongation[i]);CHKERRQ(ierr);
      if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE && i < osm->n_local_true - 1) {
        ierr = VecScatterDestroy(&osm->lprolongation[i]);CHKERRQ(ierr);
      }
      ierr = VecDestroy(&osm->x[i]);CHKERRQ(ierr);
      ierr = VecDestroy(&osm->y[i]);CHKERRQ(ierr);
      ierr = VecDestroy(&osm->y_local[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree(osm->restriction);CHKERRQ(ierr);
    if (osm->localization) {ierr = PetscFree(osm->localization);CHKERRQ(ierr);}
    ierr = PetscFree(osm->prolongation);CHKERRQ(ierr);
    if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
      ierr = PetscFree(osm->lprolongation);CHKERRQ(ierr);
    }
    ierr = PetscFree(osm->x);CHKERRQ(ierr);
    ierr = PetscFree(osm->y);CHKERRQ(ierr);
    ierr = PetscFree(osm->y_local);CHKERRQ(ierr);
  }
  ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
  if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
    ierr = ISDestroy(&osm->lis);CHKERRQ(ierr);
    ierr = MatDestroyMatrices(osm->n_local_true, &osm->lmats);CHKERRQ(ierr);
    ierr = VecDestroy(&osm->lx);CHKERRQ(ierr);
    ierr = VecDestroy(&osm->ly);CHKERRQ(ierr);
  }

  ierr = PetscFree(osm->sub_mat_type);CHKERRQ(ierr);

  osm->is       = NULL;
  osm->is_local = NULL;
  PetscFunctionReturn(0);
}

static PetscErrorCode PCDestroy_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  ierr = PCReset_ASM(pc);CHKERRQ(ierr);
  if (osm->ksp) {
    for (i=0; i<osm->n_local_true; i++) {
      ierr = KSPDestroy(&osm->ksp[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree(osm->ksp);CHKERRQ(ierr);
  }
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCSetFromOptions_ASM(PetscOptionItems *PetscOptionsObject,PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       blocks,ovl;
  PetscBool      symset,flg;
  PCASMType      asmtype;
  PCCompositeType loctype;
  char           sub_mat_type[256];

  PetscFunctionBegin;
  /* set the type to basic if the matrix is known to be symmetric */
  if (!osm->type_set && pc->pmat) {
    ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
    if (symset && flg) osm->type = PC_ASM_BASIC;
  }
  ierr = PetscOptionsHead(PetscOptionsObject,"Additive Schwarz options");CHKERRQ(ierr);
  ierr = PetscOptionsBool("-pc_asm_dm_subdomains","Use DMCreateDomainDecomposition() to define subdomains","PCASMSetDMSubdomains",osm->dm_subdomains,&osm->dm_subdomains,&flg);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-pc_asm_blocks","Number of subdomains","PCASMSetTotalSubdomains",osm->n,&blocks,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PCASMSetTotalSubdomains(pc,blocks,NULL,NULL);CHKERRQ(ierr);
    osm->dm_subdomains = PETSC_FALSE;
  }
  ierr = PetscOptionsInt("-pc_asm_overlap","Number of grid points overlap","PCASMSetOverlap",osm->overlap,&ovl,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PCASMSetOverlap(pc,ovl);CHKERRQ(ierr);
    osm->dm_subdomains = PETSC_FALSE;
  }
  flg  = PETSC_FALSE;
  ierr = PetscOptionsEnum("-pc_asm_type","Type of restriction/extension","PCASMSetType",PCASMTypes,(PetscEnum)osm->type,(PetscEnum*)&asmtype,&flg);CHKERRQ(ierr);
  if (flg) {ierr = PCASMSetType(pc,asmtype);CHKERRQ(ierr);}
  flg  = PETSC_FALSE;
  ierr = PetscOptionsEnum("-pc_asm_local_type","Type of local solver composition","PCASMSetLocalType",PCCompositeTypes,(PetscEnum)osm->loctype,(PetscEnum*)&loctype,&flg);CHKERRQ(ierr);
  if (flg) {ierr = PCASMSetLocalType(pc,loctype);CHKERRQ(ierr);}
  ierr = PetscOptionsFList("-pc_asm_sub_mat_type","Subsolve Matrix Type","PCASMSetSubMatType",MatList,NULL,sub_mat_type,256,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PCASMSetSubMatType(pc,sub_mat_type);CHKERRQ(ierr);
  }
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*------------------------------------------------------------------------------------*/

static PetscErrorCode  PCASMSetLocalSubdomains_ASM(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 1 or more blocks, n = %D",n);
  if (pc->setupcalled && (n != osm->n_local_true || is)) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetLocalSubdomains() should be called before calling PCSetUp().");

  if (!pc->setupcalled) {
    if (is) {
      for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is[i]);CHKERRQ(ierr);}
    }
    if (is_local) {
      for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is_local[i]);CHKERRQ(ierr);}
    }
    ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);

    osm->n_local_true = n;
    osm->is           = NULL;
    osm->is_local     = NULL;
    if (is) {
      ierr = PetscMalloc1(n,&osm->is);CHKERRQ(ierr);
      for (i=0; i<n; i++) osm->is[i] = is[i];
      /* Flag indicating that the user has set overlapping subdomains so PCASM should not increase their size. */
      osm->overlap = -1;
    }
    if (is_local) {
      ierr = PetscMalloc1(n,&osm->is_local);CHKERRQ(ierr);
      for (i=0; i<n; i++) osm->is_local[i] = is_local[i];
      if (!is) {
        ierr = PetscMalloc1(osm->n_local_true,&osm->is);CHKERRQ(ierr);
        for (i=0; i<osm->n_local_true; i++) {
          if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
            ierr = ISDuplicate(osm->is_local[i],&osm->is[i]);CHKERRQ(ierr);
            ierr = ISCopy(osm->is_local[i],osm->is[i]);CHKERRQ(ierr);
          } else {
            ierr = PetscObjectReference((PetscObject)osm->is_local[i]);CHKERRQ(ierr);
            osm->is[i] = osm->is_local[i];
          }
        }
      }
    }
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode  PCASMSetTotalSubdomains_ASM(PC pc,PetscInt N,IS *is,IS *is_local)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscMPIInt    rank,size;
  PetscInt       n;

  PetscFunctionBegin;
  if (N < 1) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Number of total blocks must be > 0, N = %D",N);
  if (is || is_local) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Use PCASMSetLocalSubdomains() to set specific index sets\nThey cannot be set globally yet.");

  /*
     Split the subdomains equally among all processors
  */
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);
  n    = N/size + ((N % size) > rank);
  if (!n) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Process %d must have at least one block: total processors %d total blocks %D",(int)rank,(int)size,N);
  if (pc->setupcalled && n != osm->n_local_true) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"PCASMSetTotalSubdomains() should be called before PCSetUp().");
  if (!pc->setupcalled) {
    ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);

    osm->n_local_true = n;
    osm->is           = NULL;
    osm->is_local     = NULL;
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode  PCASMSetOverlap_ASM(PC pc,PetscInt ovl)
{
  PC_ASM *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  if (ovl < 0) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
  if (pc->setupcalled && ovl != osm->overlap) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetOverlap() should be called before PCSetUp().");
  if (!pc->setupcalled) osm->overlap = ovl;
  PetscFunctionReturn(0);
}

static PetscErrorCode  PCASMSetType_ASM(PC pc,PCASMType type)
{
  PC_ASM *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  osm->type     = type;
  osm->type_set = PETSC_TRUE;
  PetscFunctionReturn(0);
}

static PetscErrorCode  PCASMGetType_ASM(PC pc,PCASMType *type)
{
  PC_ASM *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  *type = osm->type;
  PetscFunctionReturn(0);
}

static PetscErrorCode  PCASMSetLocalType_ASM(PC pc, PCCompositeType type)
{
  PC_ASM *osm = (PC_ASM *) pc->data;

  PetscFunctionBegin;
  if (type != PC_COMPOSITE_ADDITIVE && type != PC_COMPOSITE_MULTIPLICATIVE) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Only supports additive or multiplicative as the local type");
  osm->loctype = type;
  PetscFunctionReturn(0);
}

static PetscErrorCode  PCASMGetLocalType_ASM(PC pc, PCCompositeType *type)
{
  PC_ASM *osm = (PC_ASM *) pc->data;

  PetscFunctionBegin;
  *type = osm->loctype;
  PetscFunctionReturn(0);
}

static PetscErrorCode  PCASMSetSortIndices_ASM(PC pc,PetscBool doSort)
{
  PC_ASM *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  osm->sort_indices = doSort;
  PetscFunctionReturn(0);
}

static PetscErrorCode  PCASMGetSubKSP_ASM(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (osm->n_local_true < 1) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ORDER,"Need to call PCSetUp() on PC (or KSPSetUp() on the outer KSP object) before calling here");

  if (n_local) *n_local = osm->n_local_true;
  if (first_local) {
    ierr          = MPI_Scan(&osm->n_local_true,first_local,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
    *first_local -= osm->n_local_true;
  }
  if (ksp) {
    /* Assume that local solves are now different; not necessarily
       true though!  This flag is used only for PCView_ASM() */
    *ksp                   = osm->ksp;
    osm->same_local_solves = PETSC_FALSE;
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode  PCASMGetSubMatType_ASM(PC pc,MatType *sub_mat_type)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(sub_mat_type,2);
  *sub_mat_type = osm->sub_mat_type;
  PetscFunctionReturn(0);
}

static PetscErrorCode PCASMSetSubMatType_ASM(PC pc,MatType sub_mat_type)
{
  PetscErrorCode    ierr;
  PC_ASM            *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscFree(osm->sub_mat_type);CHKERRQ(ierr);
  ierr = PetscStrallocpy(sub_mat_type,(char**)&osm->sub_mat_type);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
    PCASMSetLocalSubdomains - Sets the local subdomains (for this processor only) for the additive Schwarz preconditioner.

    Collective on PC

    Input Parameters:
+   pc - the preconditioner context
.   n - the number of subdomains for this processor (default value = 1)
.   is - the index set that defines the subdomains for this processor
         (or NULL for PETSc to determine subdomains)
-   is_local - the index sets that define the local part of the subdomains for this processor, not used unless PCASMType is PC_ASM_RESTRICT
         (or NULL to not provide these)

    Notes:
    The IS numbering is in the parallel, global numbering of the vector for both is and is_local

    By default the ASM preconditioner uses 1 block per processor.

    Use PCASMSetTotalSubdomains() to set the subdomains for all processors.

    If is_local is provided and PCASMType is PC_ASM_RESTRICT then the solution only over the is_local region is interpolated
    back to form the global solution (this is the standard restricted additive Schwarz method)

    If is_local is provided and PCASMType is PC_ASM_INTERPOLATE or PC_ASM_NONE then an error is generated since there is
    no code to handle that case.

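    Example Usage:
    A minimal sketch with one block per process; nidx and idx (the block size and the global row numbers of this process's block) stand in for application data:
.vb
      IS  is[1];
      ierr = ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,PETSC_COPY_VALUES,&is[0]);CHKERRQ(ierr);
      ierr = PCASMSetLocalSubdomains(pc,1,is,NULL);CHKERRQ(ierr);
      ierr = ISDestroy(&is[0]);CHKERRQ(ierr); /* PCASM keeps its own reference */
.ve
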
    Level: advanced

.keywords: PC, ASM, set, local, subdomains, additive Schwarz

.seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains(), PCASMType, PCASMSetType()
@*/
PetscErrorCode  PCASMSetLocalSubdomains(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCASMSetLocalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,n,is,is_local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
    PCASMSetTotalSubdomains - Sets the subdomains for all processors for the
    additive Schwarz preconditioner.  Either all or no processors in the
    PC communicator must call this routine, with the same index sets.

    Collective on PC

    Input Parameters:
+   pc - the preconditioner context
.   N  - the number of subdomains for all processors
.   is - the index sets that define the subdomains for all processors
         (or NULL to ask PETSc to determine the subdomains)
-   is_local - the index sets that define the local part of the subdomains for this processor
         (or NULL to not provide this information)

    Options Database Key:
    To set the total number of subdomain blocks rather than specify the
    index sets, use the option
.    -pc_asm_blocks <blks> - Sets total blocks

    Notes:
    Currently you cannot use this to set the actual subdomains with the arguments is or is_local.

    By default the ASM preconditioner uses 1 block per processor.

    These index sets cannot be destroyed until after completion of the
    linear solves for which the ASM preconditioner is being used.

    Use PCASMSetLocalSubdomains() to set local subdomains.

    The IS numbering is in the parallel, global numbering of the vector for both is and is_local

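    Example Usage:
    A minimal sketch requesting 8 blocks split across the communicator, with the index sets chosen by PETSc (N must be at least the number of processes so that each gets a block):
.vb
      ierr = PCSetType(pc,PCASM);CHKERRQ(ierr);
      ierr = PCASMSetTotalSubdomains(pc,8,NULL,NULL);CHKERRQ(ierr);
.ve
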
    Level: advanced

.keywords: PC, ASM, set, total, global, subdomains, additive Schwarz

.seealso: PCASMSetLocalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D()
@*/
PetscErrorCode  PCASMSetTotalSubdomains(PC pc,PetscInt N,IS is[],IS is_local[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCASMSetTotalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,N,is,is_local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
    PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
    additive Schwarz preconditioner.  Either all or no processors in the
    PC communicator must call this routine.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)

    Options Database Key:
.   -pc_asm_overlap <ovl> - Sets overlap

    Notes:
    By default the ASM preconditioner uses 1 block per processor.  To use
    multiple blocks per processor, see PCASMSetTotalSubdomains() and
    PCASMSetLocalSubdomains() (and the option -pc_asm_blocks <blks>).

    The overlap defaults to 1, so if one desires that no additional
    overlap be computed beyond what may have been set with a call to
    PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(), then ovl
    must be set to be 0.  In particular, if one does not explicitly set
    the subdomains in an application code, then all overlap would be computed
    internally by PETSc, and using an overlap of 0 would result in an ASM
    variant that is equivalent to the block Jacobi preconditioner.

    The default algorithm used by PETSc to increase overlap is fast but not scalable;
    use the option -mat_increase_overlap_scalable when the problem and number of processes are large.

    Note that one can define initial index sets with any overlap via
    PCASMSetLocalSubdomains(); the routine
    PCASMSetOverlap() merely allows PETSc to extend that overlap further
    if desired.

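    Example Usage:
    A minimal sketch, equivalent to the command-line option -pc_asm_overlap 2:
.vb
      ierr = PCSetType(pc,PCASM);CHKERRQ(ierr);
      ierr = PCASMSetOverlap(pc,2);CHKERRQ(ierr);
.ve
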
    Level: intermediate

.keywords: PC, ASM, set, overlap

.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains(), MatIncreaseOverlap()
@*/
PetscErrorCode  PCASMSetOverlap(PC pc,PetscInt ovl)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveInt(pc,ovl,2);
  ierr = PetscTryMethod(pc,"PCASMSetOverlap_C",(PC,PetscInt),(pc,ovl));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
    PCASMSetType - Sets the type of restriction and interpolation used
    for local problems in the additive Schwarz method.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   type - variant of ASM, one of
.vb
      PC_ASM_BASIC       - full interpolation and restriction
      PC_ASM_RESTRICT    - full restriction, local processor interpolation
      PC_ASM_INTERPOLATE - full interpolation, local processor restriction
      PC_ASM_NONE        - local processor restriction and interpolation
.ve

    Options Database Key:
.   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type

    Notes:
    If is_local index sets are passed to PCASMSetLocalSubdomains() then they are used, when PC_ASM_RESTRICT has been selected,
    to limit the local processor interpolation

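    Example Usage:
    A minimal sketch selecting the standard (non-restricted) additive Schwarz method, equivalent to -pc_asm_type basic:
.vb
      ierr = PCSetType(pc,PCASM);CHKERRQ(ierr);
      ierr = PCASMSetType(pc,PC_ASM_BASIC);CHKERRQ(ierr);
.ve
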
    Level: intermediate

.keywords: PC, ASM, set, type

.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D(), PCASMType, PCASMSetLocalType(), PCASMGetLocalType()
@*/
PetscErrorCode  PCASMSetType(PC pc,PCASMType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveEnum(pc,type,2);
  ierr = PetscTryMethod(pc,"PCASMSetType_C",(PC,PCASMType),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
    PCASMGetType - Gets the type of restriction and interpolation used
    for local problems in the additive Schwarz method.

    Logically Collective on PC

    Input Parameter:
.   pc  - the preconditioner context

    Output Parameter:
.   type - variant of ASM, one of

.vb
      PC_ASM_BASIC       - full interpolation and restriction
      PC_ASM_RESTRICT    - full restriction, local processor interpolation
      PC_ASM_INTERPOLATE - full interpolation, local processor restriction
      PC_ASM_NONE        - local processor restriction and interpolation
.ve

    Options Database Key:
.   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type

    Level: intermediate

.keywords: PC, ASM, get, type

.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D(), PCASMType, PCASMSetType(), PCASMSetLocalType(), PCASMGetLocalType()
@*/
PetscErrorCode  PCASMGetType(PC pc,PCASMType *type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscUseMethod(pc,"PCASMGetType_C",(PC,PCASMType*),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
  PCASMSetLocalType - Sets the type of composition used for local problems in the additive Schwarz method.

  Logically Collective on PC

  Input Parameters:
+ pc  - the preconditioner context
- type - type of composition, one of
.vb
  PC_COMPOSITE_ADDITIVE       - local additive combination
  PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
.ve

  Options Database Key:
. -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type

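  Example Usage:
  A minimal sketch, equivalent to the command-line option -pc_asm_local_type multiplicative:
.vb
    ierr = PCSetType(pc,PCASM);CHKERRQ(ierr);
    ierr = PCASMSetLocalType(pc,PC_COMPOSITE_MULTIPLICATIVE);CHKERRQ(ierr);
.ve
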
  Level: intermediate

.seealso: PCASMSetType(), PCASMGetType(), PCASMGetLocalType(), PCASM, PCASMType, PCCompositeType
@*/
PetscErrorCode PCASMSetLocalType(PC pc, PCCompositeType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(pc, type, 2);
  ierr = PetscTryMethod(pc, "PCASMSetLocalType_C", (PC, PCCompositeType), (pc, type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
  PCASMGetLocalType - Gets the type of composition used for local problems in the additive Schwarz method.

  Logically Collective on PC

  Input Parameter:
. pc  - the preconditioner context

  Output Parameter:
. type - type of composition, one of
.vb
  PC_COMPOSITE_ADDITIVE       - local additive combination
  PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
.ve

  Options Database Key:
. -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type

  Level: intermediate

.seealso: PCASMSetType(), PCASMGetType(), PCASMSetLocalType(), PCASMCreate(), PCASMType, PCCompositeType
@*/
PetscErrorCode PCASMGetLocalType(PC pc, PCCompositeType *type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidPointer(type, 2);
  ierr = PetscUseMethod(pc, "PCASMGetLocalType_C", (PC, PCCompositeType *), (pc, type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
    PCASMSetSortIndices - Determines whether subdomain indices are sorted.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   doSort - sort the subdomain indices

    Level: intermediate

.keywords: PC, ASM, set, sort, indices

.seealso: PCASMSetLocalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D()
@*/
PetscErrorCode  PCASMSetSortIndices(PC pc,PetscBool doSort)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveBool(pc,doSort,2);
  ierr = PetscTryMethod(pc,"PCASMSetSortIndices_C",(PC,PetscBool),(pc,doSort));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

1220 /*@C
1221    PCASMGetSubKSP - Gets the local KSP contexts for all blocks on
1222    this processor.
1223 
1224    Collective on PC iff first_local is requested
1225 
1226    Input Parameter:
1227 .  pc - the preconditioner context
1228 
1229    Output Parameters:
1230 +  n_local - the number of blocks on this processor or NULL
1231 .  first_local - the global number of the first block on this processor or NULL,
1232                  all processors must request or all must pass NULL
1233 -  ksp - the array of KSP contexts
1234 
1235    Note:
   After PCASMGetSubKSP() the array of KSP contexts is not to be freed.
1237 
1238    You must call KSPSetUp() before calling PCASMGetSubKSP().
1239 
1240    Fortran note:
1241    The output argument 'ksp' must be an array of sufficient length or PETSC_NULL_KSP. The latter can be used to learn the necessary length.
1242 
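   For example, to use a different local solver on each block (a sketch; ksp is the outer solver, and pc, nlocal, first, subksp, subpc, and i are illustrative local variables):
.vb
  ierr = KSPSetUp(ksp);CHKERRQ(ierr);
  ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
  ierr = PCASMGetSubKSP(pc,&nlocal,&first,&subksp);CHKERRQ(ierr);
  for (i=0; i<nlocal; i++) {
    ierr = KSPGetPC(subksp[i],&subpc);CHKERRQ(ierr);
    ierr = PCSetType(subpc,PCILU);CHKERRQ(ierr);
  }
.ve
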
1243    Level: advanced
1244 
1245 .keywords: PC, ASM, additive Schwarz, get, sub, KSP, context
1246 
.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMSetOverlap(),
          PCASMCreateSubdomains2D()
1249 @*/
1250 PetscErrorCode  PCASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
1251 {
1252   PetscErrorCode ierr;
1253 
1254   PetscFunctionBegin;
1255   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1256   ierr = PetscUseMethod(pc,"PCASMGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));CHKERRQ(ierr);
1257   PetscFunctionReturn(0);
1258 }
1259 
1260 /* -------------------------------------------------------------------------------------*/
1261 /*MC
1262    PCASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
1263            its own KSP object.
1264 
1265    Options Database Keys:
1266 +  -pc_asm_blocks <blks> - Sets total blocks
1267 .  -pc_asm_overlap <ovl> - Sets overlap
1268 .  -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type, default is restrict
-  -pc_asm_local_type [additive, multiplicative] - Sets the local solver composition type, default is additive
1270 
     IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
      will get different convergence rates due to the default option of -pc_asm_type restrict. Use
      -pc_asm_type basic to get the standard ASM behavior.
1274 
1275    Notes: Each processor can have one or more blocks, but a block cannot be shared by more
1276      than one processor. Use PCGASM for subdomains shared by multiple processes. Defaults to one block per processor.
1277 
1278      To set options on the solvers for each block append -sub_ to all the KSP, and PC
1279         options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
1280 
1281      To set the options on the solvers separate for each block call PCASMGetSubKSP()
1282          and set the options directly on the resulting KSP object (you can access its PC
1283          with KSPGetPC())
1284 
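     A typical command-line invocation, as a sketch (the executable name is illustrative):
.vb
  mpiexec -n 4 ./app -ksp_type gmres -pc_type asm -pc_asm_overlap 2 -sub_pc_type ilu -sub_pc_factor_levels 1
.ve
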
1285    Level: beginner
1286 
1287    Concepts: additive Schwarz method
1288 
1289     References:
1290 +   1. - M Dryja, OB Widlund, An additive variant of the Schwarz alternating method for the case of many subregions
1291      Courant Institute, New York University Technical report
-   2. - Barry Smith, Petter Bjorstad, and William Gropp, Domain Decompositions: Parallel Multilevel Methods for Elliptic Partial Differential Equations,
1293     Cambridge University Press.
1294 
1295 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
1296            PCBJACOBI, PCASMGetSubKSP(), PCASMSetLocalSubdomains(), PCASMType, PCASMGetType(), PCASMSetLocalType(), PCASMGetLocalType()
1297            PCASMSetTotalSubdomains(), PCSetModifySubmatrices(), PCASMSetOverlap(), PCASMSetType(), PCCompositeType
1298 
1299 M*/
1300 
1301 PETSC_EXTERN PetscErrorCode PCCreate_ASM(PC pc)
1302 {
1303   PetscErrorCode ierr;
1304   PC_ASM         *osm;
1305 
1306   PetscFunctionBegin;
1307   ierr = PetscNewLog(pc,&osm);CHKERRQ(ierr);
1308 
1309   osm->n                 = PETSC_DECIDE;
1310   osm->n_local           = 0;
1311   osm->n_local_true      = PETSC_DECIDE;
1312   osm->overlap           = 1;
1313   osm->ksp               = 0;
1314   osm->restriction       = 0;
1315   osm->localization      = 0;
1316   osm->prolongation      = 0;
1317   osm->lprolongation     = 0;
1318   osm->x                 = 0;
1319   osm->y                 = 0;
1320   osm->y_local           = 0;
1321   osm->is                = 0;
1322   osm->is_local          = 0;
1323   osm->mat               = 0;
1324   osm->pmat              = 0;
1325   osm->type              = PC_ASM_RESTRICT;
1326   osm->loctype           = PC_COMPOSITE_ADDITIVE;
1327   osm->same_local_solves = PETSC_TRUE;
1328   osm->sort_indices      = PETSC_TRUE;
1329   osm->dm_subdomains     = PETSC_FALSE;
1330   osm->sub_mat_type      = NULL;
1331 
1332   pc->data                 = (void*)osm;
1333   pc->ops->apply           = PCApply_ASM;
1334   pc->ops->applytranspose  = PCApplyTranspose_ASM;
1335   pc->ops->setup           = PCSetUp_ASM;
1336   pc->ops->reset           = PCReset_ASM;
1337   pc->ops->destroy         = PCDestroy_ASM;
1338   pc->ops->setfromoptions  = PCSetFromOptions_ASM;
1339   pc->ops->setuponblocks   = PCSetUpOnBlocks_ASM;
1340   pc->ops->view            = PCView_ASM;
1341   pc->ops->applyrichardson = 0;
1342 
1343   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalSubdomains_C",PCASMSetLocalSubdomains_ASM);CHKERRQ(ierr);
1344   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetTotalSubdomains_C",PCASMSetTotalSubdomains_ASM);CHKERRQ(ierr);
1345   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetOverlap_C",PCASMSetOverlap_ASM);CHKERRQ(ierr);
1346   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetType_C",PCASMSetType_ASM);CHKERRQ(ierr);
1347   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetType_C",PCASMGetType_ASM);CHKERRQ(ierr);
1348   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalType_C",PCASMSetLocalType_ASM);CHKERRQ(ierr);
1349   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetLocalType_C",PCASMGetLocalType_ASM);CHKERRQ(ierr);
1350   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSortIndices_C",PCASMSetSortIndices_ASM);CHKERRQ(ierr);
1351   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubKSP_C",PCASMGetSubKSP_ASM);CHKERRQ(ierr);
1352   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubMatType_C",PCASMGetSubMatType_ASM);CHKERRQ(ierr);
1353   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSubMatType_C",PCASMSetSubMatType_ASM);CHKERRQ(ierr);
1354   PetscFunctionReturn(0);
1355 }
1356 
1357 /*@C
1358    PCASMCreateSubdomains - Creates the index sets for the overlapping Schwarz
   preconditioner for any problem on a general grid.
1360 
1361    Collective
1362 
1363    Input Parameters:
1364 +  A - The global matrix operator
1365 -  n - the number of local blocks
1366 
   Output Parameter:
1368 .  outis - the array of index sets defining the subdomains
1369 
1370    Level: advanced
1371 
   Note: this generates nonoverlapping subdomains; PCASM will generate the overlap
    from these if you use PCASMSetLocalSubdomains() (the amount of overlap is set with PCASMSetOverlap())

    In the Fortran version you must pass in the array outis[], already allocated with length n.
1376 
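    Example usage, a minimal sketch (assumes A is the system matrix, pc is a PCASM, is is a local variable of type IS*, and error handling follows the surrounding code):
.vb
  ierr = PCASMCreateSubdomains(A,4,&is);CHKERRQ(ierr);
  ierr = PCASMSetLocalSubdomains(pc,4,is,NULL);CHKERRQ(ierr);
  /* ... set up and solve ... */
  ierr = PCASMDestroySubdomains(4,is,NULL);CHKERRQ(ierr);
.ve
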
1377 .keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid
1378 
1379 .seealso: PCASMSetLocalSubdomains(), PCASMDestroySubdomains()
1380 @*/
1381 PetscErrorCode  PCASMCreateSubdomains(Mat A, PetscInt n, IS* outis[])
1382 {
1383   MatPartitioning mpart;
1384   const char      *prefix;
1385   void            (*f)(void);
1386   PetscInt        i,j,rstart,rend,bs;
1387   PetscBool       isbaij = PETSC_FALSE,foundpart = PETSC_FALSE;
1388   Mat             Ad     = NULL, adj;
1389   IS              ispart,isnumb,*is;
1390   PetscErrorCode  ierr;
1391 
1392   PetscFunctionBegin;
1393   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
1394   PetscValidPointer(outis,3);
1395   if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of local blocks must be > 0, n = %D",n);
1396 
1397   /* Get prefix, row distribution, and block size */
1398   ierr = MatGetOptionsPrefix(A,&prefix);CHKERRQ(ierr);
1399   ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
1400   ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
1401   if (rstart/bs*bs != rstart || rend/bs*bs != rend) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"bad row distribution [%D,%D) for matrix block size %D",rstart,rend,bs);
1402 
1403   /* Get diagonal block from matrix if possible */
1404   ierr = MatShellGetOperation(A,MATOP_GET_DIAGONAL_BLOCK,&f);CHKERRQ(ierr);
1405   if (f) {
1406     ierr = MatGetDiagonalBlock(A,&Ad);CHKERRQ(ierr);
1407   }
1408   if (Ad) {
1409     ierr = PetscObjectTypeCompare((PetscObject)Ad,MATSEQBAIJ,&isbaij);CHKERRQ(ierr);
1410     if (!isbaij) {ierr = PetscObjectTypeCompare((PetscObject)Ad,MATSEQSBAIJ,&isbaij);CHKERRQ(ierr);}
1411   }
1412   if (Ad && n > 1) {
1413     PetscBool match,done;
1414     /* Try to setup a good matrix partitioning if available */
1415     ierr = MatPartitioningCreate(PETSC_COMM_SELF,&mpart);CHKERRQ(ierr);
1416     ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
1417     ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr);
1418     ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGCURRENT,&match);CHKERRQ(ierr);
1419     if (!match) {
1420       ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGSQUARE,&match);CHKERRQ(ierr);
1421     }
1422     if (!match) { /* assume a "good" partitioner is available */
1423       PetscInt       na;
1424       const PetscInt *ia,*ja;
1425       ierr = MatGetRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
1426       if (done) {
1427         /* Build adjacency matrix by hand. Unfortunately a call to
1428            MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
1429            remove the block-aij structure and we cannot expect
1430            MatPartitioning to split vertices as we need */
1431         PetscInt       i,j,len,nnz,cnt,*iia=0,*jja=0;
1432         const PetscInt *row;
1433         nnz = 0;
1434         for (i=0; i<na; i++) { /* count number of nonzeros */
1435           len = ia[i+1] - ia[i];
1436           row = ja + ia[i];
1437           for (j=0; j<len; j++) {
1438             if (row[j] == i) { /* don't count diagonal */
1439               len--; break;
1440             }
1441           }
1442           nnz += len;
1443         }
1444         ierr   = PetscMalloc1(na+1,&iia);CHKERRQ(ierr);
1445         ierr   = PetscMalloc1(nnz,&jja);CHKERRQ(ierr);
1446         nnz    = 0;
1447         iia[0] = 0;
1448         for (i=0; i<na; i++) { /* fill adjacency */
1449           cnt = 0;
1450           len = ia[i+1] - ia[i];
1451           row = ja + ia[i];
1452           for (j=0; j<len; j++) {
1453             if (row[j] != i) { /* if not diagonal */
1454               jja[nnz+cnt++] = row[j];
1455             }
1456           }
1457           nnz     += cnt;
1458           iia[i+1] = nnz;
1459         }
1460         /* Partitioning of the adjacency matrix */
1461         ierr      = MatCreateMPIAdj(PETSC_COMM_SELF,na,na,iia,jja,NULL,&adj);CHKERRQ(ierr);
1462         ierr      = MatPartitioningSetAdjacency(mpart,adj);CHKERRQ(ierr);
1463         ierr      = MatPartitioningSetNParts(mpart,n);CHKERRQ(ierr);
1464         ierr      = MatPartitioningApply(mpart,&ispart);CHKERRQ(ierr);
1465         ierr      = ISPartitioningToNumbering(ispart,&isnumb);CHKERRQ(ierr);
1466         ierr      = MatDestroy(&adj);CHKERRQ(ierr);
1467         foundpart = PETSC_TRUE;
1468       }
1469       ierr = MatRestoreRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
1470     }
1471     ierr = MatPartitioningDestroy(&mpart);CHKERRQ(ierr);
1472   }
1473 
1474   ierr   = PetscMalloc1(n,&is);CHKERRQ(ierr);
1475   *outis = is;
1476 
1477   if (!foundpart) {
1478 
1479     /* Partitioning by contiguous chunks of rows */
1480 
1481     PetscInt mbs   = (rend-rstart)/bs;
1482     PetscInt start = rstart;
1483     for (i=0; i<n; i++) {
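      /* each subdomain gets mbs/n blocks of rows; the first mbs%n subdomains get one extra block */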
1484       PetscInt count = (mbs/n + ((mbs % n) > i)) * bs;
1485       ierr   = ISCreateStride(PETSC_COMM_SELF,count,start,1,&is[i]);CHKERRQ(ierr);
1486       start += count;
1487     }
1488 
1489   } else {
1490 
1491     /* Partitioning by adjacency of diagonal block  */
1492 
1493     const PetscInt *numbering;
1494     PetscInt       *count,nidx,*indices,*newidx,start=0;
1495     /* Get node count in each partition */
1496     ierr = PetscMalloc1(n,&count);CHKERRQ(ierr);
1497     ierr = ISPartitioningCount(ispart,n,count);CHKERRQ(ierr);
1498     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1499       for (i=0; i<n; i++) count[i] *= bs;
1500     }
1501     /* Build indices from node numbering */
1502     ierr = ISGetLocalSize(isnumb,&nidx);CHKERRQ(ierr);
1503     ierr = PetscMalloc1(nidx,&indices);CHKERRQ(ierr);
1504     for (i=0; i<nidx; i++) indices[i] = i; /* needs to be initialized */
1505     ierr = ISGetIndices(isnumb,&numbering);CHKERRQ(ierr);
1506     ierr = PetscSortIntWithPermutation(nidx,numbering,indices);CHKERRQ(ierr);
1507     ierr = ISRestoreIndices(isnumb,&numbering);CHKERRQ(ierr);
1508     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1509       ierr = PetscMalloc1(nidx*bs,&newidx);CHKERRQ(ierr);
1510       for (i=0; i<nidx; i++) {
1511         for (j=0; j<bs; j++) newidx[i*bs+j] = indices[i]*bs + j;
1512       }
1513       ierr    = PetscFree(indices);CHKERRQ(ierr);
1514       nidx   *= bs;
1515       indices = newidx;
1516     }
1517     /* Shift to get global indices */
1518     for (i=0; i<nidx; i++) indices[i] += rstart;
1519 
1520     /* Build the index sets for each block */
1521     for (i=0; i<n; i++) {
1522       ierr   = ISCreateGeneral(PETSC_COMM_SELF,count[i],&indices[start],PETSC_COPY_VALUES,&is[i]);CHKERRQ(ierr);
1523       ierr   = ISSort(is[i]);CHKERRQ(ierr);
1524       start += count[i];
1525     }
1526 
1527     ierr = PetscFree(count);CHKERRQ(ierr);
1528     ierr = PetscFree(indices);CHKERRQ(ierr);
1529     ierr = ISDestroy(&isnumb);CHKERRQ(ierr);
1530     ierr = ISDestroy(&ispart);CHKERRQ(ierr);
1531 
1532   }
1533   PetscFunctionReturn(0);
1534 }
1535 
1536 /*@C
1537    PCASMDestroySubdomains - Destroys the index sets created with
1538    PCASMCreateSubdomains(). Should be called after setting subdomains
1539    with PCASMSetLocalSubdomains().
1540 
1541    Collective
1542 
1543    Input Parameters:
1544 +  n - the number of index sets
1545 .  is - the array of index sets
1546 -  is_local - the array of local index sets, can be NULL
1547 
1548    Level: advanced
1549 
1550 .keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid
1551 
1552 .seealso: PCASMCreateSubdomains(), PCASMSetLocalSubdomains()
1553 @*/
1554 PetscErrorCode  PCASMDestroySubdomains(PetscInt n, IS is[], IS is_local[])
1555 {
1556   PetscInt       i;
1557   PetscErrorCode ierr;
1558 
1559   PetscFunctionBegin;
1560   if (n <= 0) PetscFunctionReturn(0);
1561   if (is) {
1562     PetscValidPointer(is,2);
1563     for (i=0; i<n; i++) { ierr = ISDestroy(&is[i]);CHKERRQ(ierr); }
1564     ierr = PetscFree(is);CHKERRQ(ierr);
1565   }
1566   if (is_local) {
1567     PetscValidPointer(is_local,3);
1568     for (i=0; i<n; i++) { ierr = ISDestroy(&is_local[i]);CHKERRQ(ierr); }
1569     ierr = PetscFree(is_local);CHKERRQ(ierr);
1570   }
1571   PetscFunctionReturn(0);
1572 }
1573 
1574 /*@
1575    PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
1576    preconditioner for a two-dimensional problem on a regular grid.
1577 
1578    Not Collective
1579 
1580    Input Parameters:
1581 +  m, n - the number of mesh points in the x and y directions
1582 .  M, N - the number of subdomains in the x and y directions
1583 .  dof - degrees of freedom per node
1584 -  overlap - overlap in mesh lines
1585 
1586    Output Parameters:
1587 +  Nsub - the number of subdomains created
1588 .  is - array of index sets defining overlapping (if overlap > 0) subdomains
1589 -  is_local - array of index sets defining non-overlapping subdomains
1590 
1591    Note:
   Presently PCASMCreateSubdomains2D() is valid only for sequential
1593    preconditioners.  More general related routines are
1594    PCASMSetTotalSubdomains() and PCASMSetLocalSubdomains().
1595 
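   Example usage, a minimal sketch for a 10x10 grid split into 2x2 subdomains with overlap 1 (Nsub, is, and is_local are illustrative local variables, and pc is an existing PCASM):
.vb
  ierr = PCASMCreateSubdomains2D(10,10,2,2,1,1,&Nsub,&is,&is_local);CHKERRQ(ierr);
  ierr = PCASMSetLocalSubdomains(pc,Nsub,is,is_local);CHKERRQ(ierr);
.ve
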
1596    Level: advanced
1597 
1598 .keywords: PC, ASM, additive Schwarz, create, subdomains, 2D, regular grid
1599 
1600 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
1601           PCASMSetOverlap()
1602 @*/
1603 PetscErrorCode  PCASMCreateSubdomains2D(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt dof,PetscInt overlap,PetscInt *Nsub,IS **is,IS **is_local)
1604 {
1605   PetscInt       i,j,height,width,ystart,xstart,yleft,yright,xleft,xright,loc_outer;
1606   PetscErrorCode ierr;
1607   PetscInt       nidx,*idx,loc,ii,jj,count;
1608 
1609   PetscFunctionBegin;
  if (dof != 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Degrees of freedom per node dof must be 1");
1611 
1612   *Nsub     = N*M;
1613   ierr      = PetscMalloc1(*Nsub,is);CHKERRQ(ierr);
1614   ierr      = PetscMalloc1(*Nsub,is_local);CHKERRQ(ierr);
1615   ystart    = 0;
1616   loc_outer = 0;
1617   for (i=0; i<N; i++) {
1618     height = n/N + ((n % N) > i); /* height of subdomain */
1619     if (height < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many N subdomains for mesh dimension n");
1620     yleft  = ystart - overlap; if (yleft < 0) yleft = 0;
1621     yright = ystart + height + overlap; if (yright > n) yright = n;
1622     xstart = 0;
1623     for (j=0; j<M; j++) {
1624       width = m/M + ((m % M) > j); /* width of subdomain */
1625       if (width < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many M subdomains for mesh dimension m");
1626       xleft  = xstart - overlap; if (xleft < 0) xleft = 0;
1627       xright = xstart + width + overlap; if (xright > m) xright = m;
1628       nidx   = (xright - xleft)*(yright - yleft);
1629       ierr   = PetscMalloc1(nidx,&idx);CHKERRQ(ierr);
1630       loc    = 0;
1631       for (ii=yleft; ii<yright; ii++) {
1632         count = m*ii + xleft;
1633         for (jj=xleft; jj<xright; jj++) idx[loc++] = count++;
1634       }
1635       ierr = ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,PETSC_COPY_VALUES,(*is)+loc_outer);CHKERRQ(ierr);
1636       if (overlap == 0) {
1637         ierr = PetscObjectReference((PetscObject)(*is)[loc_outer]);CHKERRQ(ierr);
1638 
1639         (*is_local)[loc_outer] = (*is)[loc_outer];
1640       } else {
1641         for (loc=0,ii=ystart; ii<ystart+height; ii++) {
1642           for (jj=xstart; jj<xstart+width; jj++) {
1643             idx[loc++] = m*ii + jj;
1644           }
1645         }
1646         ierr = ISCreateGeneral(PETSC_COMM_SELF,loc,idx,PETSC_COPY_VALUES,*is_local+loc_outer);CHKERRQ(ierr);
1647       }
1648       ierr    = PetscFree(idx);CHKERRQ(ierr);
1649       xstart += width;
1650       loc_outer++;
1651     }
1652     ystart += height;
1653   }
1654   for (i=0; i<*Nsub; i++) { ierr = ISSort((*is)[i]);CHKERRQ(ierr); }
1655   PetscFunctionReturn(0);
1656 }
1657 
1658 /*@C
1659     PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
1660     only) for the additive Schwarz preconditioner.
1661 
1662     Not Collective
1663 
1664     Input Parameter:
1665 .   pc - the preconditioner context
1666 
1667     Output Parameters:
1668 +   n - the number of subdomains for this processor (default value = 1)
1669 .   is - the index sets that define the subdomains for this processor
1670 -   is_local - the index sets that define the local part of the subdomains for this processor (can be NULL)
1671 
1672 
1673     Notes:
1674     The IS numbering is in the parallel, global numbering of the vector.
1675 
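    Example usage, a sketch (n, is, and i are illustrative local variables):
.vb
  ierr = PCASMGetLocalSubdomains(pc,&n,&is,NULL);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    ierr = ISView(is[i],PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr);
  }
.ve
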
1676     Level: advanced
1677 
1678 .keywords: PC, ASM, set, local, subdomains, additive Schwarz
1679 
1680 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1681           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubmatrices()
1682 @*/
1683 PetscErrorCode  PCASMGetLocalSubdomains(PC pc,PetscInt *n,IS *is[],IS *is_local[])
1684 {
1685   PC_ASM         *osm = (PC_ASM*)pc->data;
1686   PetscErrorCode ierr;
1687   PetscBool      match;
1688 
1689   PetscFunctionBegin;
1690   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1691   PetscValidIntPointer(n,2);
1692   if (is) PetscValidPointer(is,3);
1693   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1694   if (!match) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"PC is not a PCASM");
1695   if (n) *n = osm->n_local_true;
1696   if (is) *is = osm->is;
1697   if (is_local) *is_local = osm->is_local;
1698   PetscFunctionReturn(0);
1699 }
1700 
1701 /*@C
1702     PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1703     only) for the additive Schwarz preconditioner.
1704 
1705     Not Collective
1706 
1707     Input Parameter:
1708 .   pc - the preconditioner context
1709 
1710     Output Parameters:
1711 +   n - the number of matrices for this processor (default value = 1)
1712 -   mat - the matrices
1713 
1714     Level: advanced
1715 
    Notes: Call after PCSetUp() (or KSPSetUp()) but before PCApply() (or KSPSolve()) and before PCSetUpOnBlocks()
1717 
1718            Usually one would use PCSetModifySubmatrices() to change the submatrices in building the preconditioner.
1719 
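    Example usage, a sketch (ksp is the outer solver; pc, n, mats, and i are illustrative local variables):
.vb
  ierr = KSPSetUp(ksp);CHKERRQ(ierr);
  ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
  ierr = PCASMGetLocalSubmatrices(pc,&n,&mats);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    ierr = MatView(mats[i],PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr);
  }
.ve
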
1720 .keywords: PC, ASM, set, local, subdomains, additive Schwarz, block Jacobi
1721 
1722 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1723           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains(), PCSetModifySubmatrices()
1724 @*/
1725 PetscErrorCode  PCASMGetLocalSubmatrices(PC pc,PetscInt *n,Mat *mat[])
1726 {
1727   PC_ASM         *osm;
1728   PetscErrorCode ierr;
1729   PetscBool      match;
1730 
1731   PetscFunctionBegin;
1732   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1733   PetscValidIntPointer(n,2);
1734   if (mat) PetscValidPointer(mat,3);
  if (!pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUp() or PCSetUp().");
1736   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1737   if (!match) {
1738     if (n) *n = 0;
1739     if (mat) *mat = NULL;
1740   } else {
1741     osm = (PC_ASM*)pc->data;
1742     if (n) *n = osm->n_local_true;
1743     if (mat) *mat = osm->pmat;
1744   }
1745   PetscFunctionReturn(0);
1746 }
1747 
1748 /*@
1749     PCASMSetDMSubdomains - Indicates whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.
1750 
1751     Logically Collective
1752 
    Input Parameters:
1754 +   pc  - the preconditioner
1755 -   flg - boolean indicating whether to use subdomains defined by the DM
1756 
1757     Options Database Key:
.   -pc_asm_dm_subdomains <bool> - use subdomains defined by the DM, when available
1759 
1760     Level: intermediate
1761 
1762     Notes:
1763     PCASMSetTotalSubdomains() and PCASMSetOverlap() take precedence over PCASMSetDMSubdomains(),
1764     so setting either of the first two effectively turns the latter off.
1765 
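    Example usage, a minimal sketch (assumes dm is a DM that supports DMCreateDomainDecomposition() and ksp is an existing KSP):
.vb
  ierr = KSPSetDM(ksp,dm);CHKERRQ(ierr);
  ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
  ierr = PCSetType(pc,PCASM);CHKERRQ(ierr);
  ierr = PCASMSetDMSubdomains(pc,PETSC_TRUE);CHKERRQ(ierr);
.ve
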
1766 .keywords: PC, ASM, DM, set, subdomains, additive Schwarz
1767 
1768 .seealso: PCASMGetDMSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap()
1769           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1770 @*/
1771 PetscErrorCode  PCASMSetDMSubdomains(PC pc,PetscBool flg)
1772 {
1773   PC_ASM         *osm = (PC_ASM*)pc->data;
1774   PetscErrorCode ierr;
1775   PetscBool      match;
1776 
1777   PetscFunctionBegin;
1778   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1779   PetscValidLogicalCollectiveBool(pc,flg,2);
  if (pc->setupcalled) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Cannot be called after PCSetUp() has been run");
1781   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1782   if (match) {
1783     osm->dm_subdomains = flg;
1784   }
1785   PetscFunctionReturn(0);
1786 }
1787 
1788 /*@
    PCASMGetDMSubdomains - Returns a flag indicating whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.

    Not Collective
1791 
1792     Input Parameter:
1793 .   pc  - the preconditioner
1794 
1795     Output Parameter:
1796 .   flg - boolean indicating whether to use subdomains defined by the DM
1797 
1798     Level: intermediate
1799 
1800 .keywords: PC, ASM, DM, set, subdomains, additive Schwarz
1801 
1802 .seealso: PCASMSetDMSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap()
1803           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1804 @*/
1805 PetscErrorCode  PCASMGetDMSubdomains(PC pc,PetscBool* flg)
1806 {
1807   PC_ASM         *osm = (PC_ASM*)pc->data;
1808   PetscErrorCode ierr;
1809   PetscBool      match;
1810 
1811   PetscFunctionBegin;
1812   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1813   PetscValidPointer(flg,2);
1814   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1815   if (match) {
1816     if (flg) *flg = osm->dm_subdomains;
1817   }
1818   PetscFunctionReturn(0);
1819 }
1820 
1821 /*@
1822      PCASMGetSubMatType - Gets the matrix type used for ASM subsolves, as a string.
1823 
1824    Not Collective
1825 
1826    Input Parameter:
1827 .  pc - the PC
1828 
1829    Output Parameter:
.  sub_mat_type - name of the matrix type used for the subsolves
1831 
1832    Level: advanced
1833 
1834 .keywords: PC, PCASM, MatType, set
1835 
1836 .seealso: PCASMSetSubMatType(), PCASM, PCSetType(), VecSetType(), MatType, Mat
1837 @*/
PetscErrorCode  PCASMGetSubMatType(PC pc,MatType *sub_mat_type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscUseMethod(pc,"PCASMGetSubMatType_C",(PC,MatType*),(pc,sub_mat_type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1844 
1845 /*@
1846      PCASMSetSubMatType - Set the type of matrix used for ASM subsolves
1847 
   Logically Collective on PC
1849 
1850    Input Parameters:
1851 +  pc             - the PC object
1852 -  sub_mat_type   - matrix type
1853 
1854    Options Database Key:
1855 .  -pc_asm_sub_mat_type  <sub_mat_type> - Sets the matrix type used for subsolves, for example, seqaijviennacl. If you specify a base name like aijviennacl, the corresponding sequential type is assumed.
1856 
1857    Notes:
1858    See "${PETSC_DIR}/include/petscmat.h" for available types
1859 
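   Example usage, a sketch (seqaijviennacl is only available in a PETSc build configured with ViennaCL):
.vb
  ierr = PCASMSetSubMatType(pc,MATSEQAIJVIENNACL);CHKERRQ(ierr);
.ve
   or, equivalently, at runtime: -pc_type asm -pc_asm_sub_mat_type seqaijviennacl
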
1860   Level: advanced
1861 
1862 .keywords: PC, PCASM, MatType, set
1863 
1864 .seealso: PCASMGetSubMatType(), PCASM, PCSetType(), VecSetType(), MatType, Mat
1865 @*/
PetscErrorCode PCASMSetSubMatType(PC pc,MatType sub_mat_type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCASMSetSubMatType_C",(PC,MatType),(pc,sub_mat_type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1873