xref: /petsc/src/ksp/pc/impls/asm/asm.c (revision d172c84b7da0a47fed8c9ccbc9dfdecdae586ec8)
1 
2 /*
3   This file defines an additive Schwarz preconditioner for any Mat implementation.
4 
5   Note that each processor may have any number of subdomains. But in order to
6   deal easily with the VecScatter(), we treat each processor as if it has the
7   same number of subdomains.
8 
9        n - total number of true subdomains on all processors
10        n_local_true - actual number of subdomains on this processor
11        n_local = maximum over all processors of n_local_true
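       For example, with 3 processes owning 2, 1, and 1 subdomains respectively,
       n = 4, n_local_true is 2, 1, and 1 on the three processes, and n_local = 2.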
12 */
13 #include <petsc/private/pcimpl.h>     /*I "petscpc.h" I*/
14 #include <petscdm.h>
15 
16 typedef struct {
17   PetscInt   n, n_local, n_local_true;
18   PetscInt   overlap;             /* overlap requested by user */
19   KSP        *ksp;                /* linear solvers for each block */
20   VecScatter restriction;         /* mapping from global to overlapping (process) subdomain */
21   VecScatter *lrestriction;       /* mapping from subregion to overlapping (process) subdomain */
22   VecScatter *lprolongation;      /* mapping from non-overlapping subregion to overlapping (process) subdomain; used for the restricted additive version of the algorithm */
23   Vec        lx, ly;              /* work vectors for the whole overlapping (process) subdomain */
24   Vec        *x,*y;               /* work vectors for each overlapping subdomain */
25   IS         lis;                 /* index set that defines each overlapping multiplicative (process) subdomain */
26   IS         *is;                 /* index set that defines each overlapping subdomain */
27   IS         *is_local;           /* index set that defines each non-overlapping subdomain, may be NULL */
28   Mat        *mat,*pmat;          /* mat is not currently used */
29   PCASMType  type;                /* use reduced interpolation, restriction or both */
30   PetscBool  type_set;            /* if user set this value (so won't change it for symmetric problems) */
31   PetscBool  same_local_solves;   /* flag indicating whether all local solvers are the same */
32   PetscBool  sort_indices;        /* flag to sort subdomain indices */
33   PetscBool  dm_subdomains;       /* whether DM is allowed to define subdomains */
34   PCCompositeType loctype;        /* the type of composition for local solves */
35   MatType    sub_mat_type;        /* the type of Mat used for subdomain solves (can be MATSAME or NULL) */
36   /* For multiplicative solve */
37   Mat       *lmats;               /* submatrices for overlapping multiplicative (process) subdomain */
38 } PC_ASM;
39 
40 static PetscErrorCode PCView_ASM(PC pc,PetscViewer viewer)
41 {
42   PC_ASM         *osm = (PC_ASM*)pc->data;
43   PetscErrorCode ierr;
44   PetscMPIInt    rank;
45   PetscInt       i,bsz;
46   PetscBool      iascii,isstring;
47   PetscViewer    sviewer;
48 
49   PetscFunctionBegin;
50   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
51   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
52   if (iascii) {
53     char overlaps[256] = "user-defined overlap",blocks[256] = "total subdomain blocks not yet set";
54     if (osm->overlap >= 0) {ierr = PetscSNPrintf(overlaps,sizeof(overlaps),"amount of overlap = %D",osm->overlap);CHKERRQ(ierr);}
55     if (osm->n > 0) {ierr = PetscSNPrintf(blocks,sizeof(blocks),"total subdomain blocks = %D",osm->n);CHKERRQ(ierr);}
56     ierr = PetscViewerASCIIPrintf(viewer,"  %s, %s\n",blocks,overlaps);CHKERRQ(ierr);
57     ierr = PetscViewerASCIIPrintf(viewer,"  restriction/interpolation type - %s\n",PCASMTypes[osm->type]);CHKERRQ(ierr);
58     if (osm->dm_subdomains) {ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: using DM to define subdomains\n");CHKERRQ(ierr);}
59     if (osm->loctype != PC_COMPOSITE_ADDITIVE) {ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: local solve composition type - %s\n",PCCompositeTypes[osm->loctype]);CHKERRQ(ierr);}
60     ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
61     if (osm->same_local_solves) {
62       if (osm->ksp) {
63         ierr = PetscViewerASCIIPrintf(viewer,"  Local solve is same for all blocks, in the following KSP and PC objects:\n");CHKERRQ(ierr);
64         ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
65         if (!rank) {
66           ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
67           ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);
68           ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
69         }
70         ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
71       }
72     } else {
73       ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
74       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"  [%d] number of local blocks = %D\n",(int)rank,osm->n_local_true);CHKERRQ(ierr);
75       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
76       ierr = PetscViewerASCIIPrintf(viewer,"  Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr);
77       ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
78       ierr = PetscViewerASCIIPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
79       ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
80       for (i=0; i<osm->n_local_true; i++) {
81         ierr = ISGetLocalSize(osm->is[i],&bsz);CHKERRQ(ierr);
82         ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"[%d] local block number %D, size = %D\n",(int)rank,i,bsz);CHKERRQ(ierr);
83         ierr = KSPView(osm->ksp[i],sviewer);CHKERRQ(ierr);
84         ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
85       }
86       ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
87       ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
88       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
89       ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
90     }
91   } else if (isstring) {
92     ierr = PetscViewerStringSPrintf(viewer," blocks=%D, overlap=%D, type=%s",osm->n,osm->overlap,PCASMTypes[osm->type]);CHKERRQ(ierr);
93     ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
94     if (osm->ksp) {ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);}
95     ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
96   }
97   PetscFunctionReturn(0);
98 }
99 
100 static PetscErrorCode PCASMPrintSubdomains(PC pc)
101 {
102   PC_ASM         *osm = (PC_ASM*)pc->data;
103   const char     *prefix;
104   char           fname[PETSC_MAX_PATH_LEN+1];
105   PetscViewer    viewer, sviewer;
106   char           *s;
107   PetscInt       i,j,nidx;
108   const PetscInt *idx;
109   PetscMPIInt    rank, size;
110   PetscErrorCode ierr;
111 
112   PetscFunctionBegin;
113   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size);CHKERRQ(ierr);
114   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank);CHKERRQ(ierr);
115   ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
116   ierr = PetscOptionsGetString(NULL,prefix,"-pc_asm_print_subdomains",fname,PETSC_MAX_PATH_LEN,NULL);CHKERRQ(ierr);
117   if (fname[0] == 0) { ierr = PetscStrcpy(fname,"stdout");CHKERRQ(ierr); }
118   ierr = PetscViewerASCIIOpen(PetscObjectComm((PetscObject)pc),fname,&viewer);CHKERRQ(ierr);
119   for (i=0; i<osm->n_local; i++) {
120     if (i < osm->n_local_true) {
121       ierr = ISGetLocalSize(osm->is[i],&nidx);CHKERRQ(ierr);
122       ierr = ISGetIndices(osm->is[i],&idx);CHKERRQ(ierr);
123       /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
124       ierr = PetscMalloc1(16*(nidx+1)+512, &s);CHKERRQ(ierr);
125       ierr = PetscViewerStringOpen(PETSC_COMM_SELF, s, 16*(nidx+1)+512, &sviewer);CHKERRQ(ierr);
126       ierr = PetscViewerStringSPrintf(sviewer, "[%d:%d] Subdomain %D with overlap:\n", (int)rank, (int)size, i);CHKERRQ(ierr);
127       for (j=0; j<nidx; j++) {
128         ierr = PetscViewerStringSPrintf(sviewer,"%D ",idx[j]);CHKERRQ(ierr);
129       }
130       ierr = ISRestoreIndices(osm->is[i],&idx);CHKERRQ(ierr);
131       ierr = PetscViewerStringSPrintf(sviewer,"\n");CHKERRQ(ierr);
132       ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr);
133       ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
134       ierr = PetscViewerASCIISynchronizedPrintf(viewer, s);CHKERRQ(ierr);
135       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
136       ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
137       ierr = PetscFree(s);CHKERRQ(ierr);
138       if (osm->is_local) {
139         /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
140         ierr = PetscMalloc1(16*(nidx+1)+512, &s);CHKERRQ(ierr);
141         ierr = PetscViewerStringOpen(PETSC_COMM_SELF, s, 16*(nidx+1)+512, &sviewer);CHKERRQ(ierr);
142         ierr = PetscViewerStringSPrintf(sviewer, "[%d:%d] Subdomain %D without overlap:\n", (int)rank, (int)size, i);CHKERRQ(ierr);
143         ierr = ISGetLocalSize(osm->is_local[i],&nidx);CHKERRQ(ierr);
144         ierr = ISGetIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
145         for (j=0; j<nidx; j++) {
146           ierr = PetscViewerStringSPrintf(sviewer,"%D ",idx[j]);CHKERRQ(ierr);
147         }
148         ierr = ISRestoreIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
149         ierr = PetscViewerStringSPrintf(sviewer,"\n");CHKERRQ(ierr);
150         ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr);
151         ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
152         ierr = PetscViewerASCIISynchronizedPrintf(viewer, s);CHKERRQ(ierr);
153         ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
154         ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
155         ierr = PetscFree(s);CHKERRQ(ierr);
156       }
157     } else {
158       /* Participate in collective viewer calls. */
159       ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
160       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
161       ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
162       /* Assume either all ranks have is_local or none do. */
163       if (osm->is_local) {
164         ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
165         ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
166         ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
167       }
168     }
169   }
170   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
171   ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
172   PetscFunctionReturn(0);
173 }
174 
175 static PetscErrorCode PCSetUp_ASM(PC pc)
176 {
177   PC_ASM         *osm = (PC_ASM*)pc->data;
178   PetscErrorCode ierr;
179   PetscBool      symset,flg;
180   PetscInt       i,m,m_local;
181   MatReuse       scall = MAT_REUSE_MATRIX;
182   IS             isl;
183   KSP            ksp;
184   PC             subpc;
185   const char     *prefix,*pprefix;
186   Vec            vec;
187   DM             *domain_dm = NULL;
188 
189   PetscFunctionBegin;
190   if (!pc->setupcalled) {
191     PetscInt m;
192 
193     if (!osm->type_set) {
194       ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
195       if (symset && flg) osm->type = PC_ASM_BASIC;
196     }
197 
198     /* Note: if subdomains have been set either via PCASMSetTotalSubdomains() or via PCASMSetLocalSubdomains(), osm->n_local_true will not be PETSC_DECIDE */
199     if (osm->n_local_true == PETSC_DECIDE) {
200       /* no subdomains given */
201       /* try pc->dm first, if allowed */
202       if (osm->dm_subdomains && pc->dm) {
203         PetscInt  num_domains, d;
204         char      **domain_names;
205         IS        *inner_domain_is, *outer_domain_is;
206         ierr = DMCreateDomainDecomposition(pc->dm, &num_domains, &domain_names, &inner_domain_is, &outer_domain_is, &domain_dm);CHKERRQ(ierr);
207         osm->overlap = -1; /* We do not want to increase the overlap of the IS.
208                               A future improvement of this code might allow one to use
209                               DM-defined subdomains and also increase the overlap,
210                               but that is not currently supported */
211         if (num_domains) {
212           ierr = PCASMSetLocalSubdomains(pc, num_domains, outer_domain_is, inner_domain_is);CHKERRQ(ierr);
213         }
214         for (d = 0; d < num_domains; ++d) {
215           if (domain_names)    {ierr = PetscFree(domain_names[d]);CHKERRQ(ierr);}
216           if (inner_domain_is) {ierr = ISDestroy(&inner_domain_is[d]);CHKERRQ(ierr);}
217           if (outer_domain_is) {ierr = ISDestroy(&outer_domain_is[d]);CHKERRQ(ierr);}
218         }
219         ierr = PetscFree(domain_names);CHKERRQ(ierr);
220         ierr = PetscFree(inner_domain_is);CHKERRQ(ierr);
221         ierr = PetscFree(outer_domain_is);CHKERRQ(ierr);
222       }
223       if (osm->n_local_true == PETSC_DECIDE) {
224         /* still no subdomains; use one subdomain per processor */
225         osm->n_local_true = 1;
226       }
227     }
228     { /* determine the global and max number of subdomains */
229       struct {PetscInt max,sum;} inwork,outwork;
230       PetscMPIInt size;
231 
232       inwork.max   = osm->n_local_true;
233       inwork.sum   = osm->n_local_true;
234       ierr         = MPIU_Allreduce(&inwork,&outwork,1,MPIU_2INT,MPIU_MAXSUM_OP,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
235       osm->n_local = outwork.max;
236       osm->n       = outwork.sum;
237 
238       ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);
239       if (outwork.max == 1 && outwork.sum == size) {
240         /* osm->n_local_true = 1 on all processes; setting this option may enable an optimized MatCreateSubMatrices() implementation */
241         ierr = MatSetOption(pc->pmat,MAT_SUBMAT_SINGLEIS,PETSC_TRUE);CHKERRQ(ierr);
242       }
243     }
244     if (!osm->is) { /* create the index sets */
245       ierr = PCASMCreateSubdomains(pc->pmat,osm->n_local_true,&osm->is);CHKERRQ(ierr);
246     }
247     if (osm->n_local_true > 1 && !osm->is_local) {
248       ierr = PetscMalloc1(osm->n_local_true,&osm->is_local);CHKERRQ(ierr);
249       for (i=0; i<osm->n_local_true; i++) {
250         if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
251           ierr = ISDuplicate(osm->is[i],&osm->is_local[i]);CHKERRQ(ierr);
252           ierr = ISCopy(osm->is[i],osm->is_local[i]);CHKERRQ(ierr);
253         } else {
254           ierr             = PetscObjectReference((PetscObject)osm->is[i]);CHKERRQ(ierr);
255           osm->is_local[i] = osm->is[i];
256         }
257       }
258     }
259     ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
260     flg  = PETSC_FALSE;
261     ierr = PetscOptionsGetBool(NULL,prefix,"-pc_asm_print_subdomains",&flg,NULL);CHKERRQ(ierr);
262     if (flg) { ierr = PCASMPrintSubdomains(pc);CHKERRQ(ierr); }
263 
264     if (osm->overlap > 0) {
265       /* Extend the "overlapping" regions by a number of steps */
266       ierr = MatIncreaseOverlap(pc->pmat,osm->n_local_true,osm->is,osm->overlap);CHKERRQ(ierr);
267     }
268     if (osm->sort_indices) {
269       for (i=0; i<osm->n_local_true; i++) {
270         ierr = ISSort(osm->is[i]);CHKERRQ(ierr);
271         if (osm->is_local) {
272           ierr = ISSort(osm->is_local[i]);CHKERRQ(ierr);
273         }
274       }
275     }
276 
277     if (!osm->ksp) {
278       /* Create the local solvers */
279       ierr = PetscMalloc1(osm->n_local_true,&osm->ksp);CHKERRQ(ierr);
280       if (domain_dm) {
281         ierr = PetscInfo(pc,"Setting up ASM subproblems using the embedded DM\n");CHKERRQ(ierr);
282       }
283       for (i=0; i<osm->n_local_true; i++) {
284         ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
285         ierr = KSPSetErrorIfNotConverged(ksp,pc->erroriffailure);CHKERRQ(ierr);
286         ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)ksp);CHKERRQ(ierr);
287         ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
288         ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
289         ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
290         ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
291         ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
292         ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
293         if (domain_dm) {
294           ierr = KSPSetDM(ksp, domain_dm[i]);CHKERRQ(ierr);
295           ierr = KSPSetDMActive(ksp, PETSC_FALSE);CHKERRQ(ierr);
296           ierr = DMDestroy(&domain_dm[i]);CHKERRQ(ierr);
297         }
298         osm->ksp[i] = ksp;
299       }
300       if (domain_dm) {
301         ierr = PetscFree(domain_dm);CHKERRQ(ierr);
302       }
303     }
304 
305     ierr = ISConcatenate(PETSC_COMM_SELF, osm->n_local_true, osm->is, &osm->lis);CHKERRQ(ierr);
306     ierr = ISSortRemoveDups(osm->lis);CHKERRQ(ierr);
307     ierr = ISGetLocalSize(osm->lis, &m);CHKERRQ(ierr);
308     ierr = VecCreateSeq(PETSC_COMM_SELF, m, &osm->lx);CHKERRQ(ierr);
309     ierr = VecDuplicate(osm->lx, &osm->ly);CHKERRQ(ierr);
310 
311     scall = MAT_INITIAL_MATRIX;
312   } else {
313     /*
314        Destroy the blocks from the previous iteration
315     */
316     if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
317       ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
318       scall = MAT_INITIAL_MATRIX;
319     }
320   }
321 
322   /*
323      Extract out the submatrices
324   */
325   ierr = MatCreateSubMatrices(pc->pmat,osm->n_local_true,osm->is,osm->is,scall,&osm->pmat);CHKERRQ(ierr);
326   if (scall == MAT_INITIAL_MATRIX) {
327     ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
328     for (i=0; i<osm->n_local_true; i++) {
329       ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)osm->pmat[i]);CHKERRQ(ierr);
330       ierr = PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);CHKERRQ(ierr);
331     }
332   }
333 
334   /* Convert the types of the submatrices (if need be) */
335   if (osm->sub_mat_type) {
336     for (i=0; i<osm->n_local_true; i++) {
337       ierr = MatConvert(osm->pmat[i],osm->sub_mat_type,MAT_INPLACE_MATRIX,&(osm->pmat[i]));CHKERRQ(ierr);
338     }
339   }
340 
341   if (!pc->setupcalled) {
342     /* Create the local work vectors (from the local matrices) and scatter contexts */
343     ierr = MatCreateVecs(pc->pmat,&vec,0);CHKERRQ(ierr);
344 
345     if (osm->is_local && (osm->type == PC_ASM_INTERPOLATE || osm->type == PC_ASM_NONE )) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot use interpolate or none PCASMType if is_local was provided to PCASMSetLocalSubdomains()");
346     if (osm->is_local && osm->type == PC_ASM_RESTRICT && osm->loctype == PC_COMPOSITE_ADDITIVE) {
347       ierr = PetscMalloc1(osm->n_local_true,&osm->lprolongation);CHKERRQ(ierr);
348     }
349     ierr = PetscMalloc1(osm->n_local_true,&osm->lrestriction);CHKERRQ(ierr);
350     ierr = PetscMalloc1(osm->n_local_true,&osm->x);CHKERRQ(ierr);
351     ierr = PetscMalloc1(osm->n_local_true,&osm->y);CHKERRQ(ierr);
352 
353     ierr = ISGetLocalSize(osm->lis,&m);CHKERRQ(ierr);
354     ierr = ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);CHKERRQ(ierr);
355     ierr = VecScatterCreate(vec,osm->lis,osm->lx,isl,&osm->restriction);CHKERRQ(ierr);
356     ierr = ISDestroy(&isl);CHKERRQ(ierr);
357 
358 
359     for (i=0; i<osm->n_local_true; ++i) {
360       ISLocalToGlobalMapping ltog;
361       IS                     isll;
362       const PetscInt         *idx_is;
363       PetscInt               *idx_lis,nout;
364 
365       ierr = ISGetLocalSize(osm->is[i],&m);CHKERRQ(ierr);
366       ierr = MatCreateVecs(osm->pmat[i],&osm->x[i],NULL);CHKERRQ(ierr);
367       ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
368 
369       /* generate a scatter from ly to y[i] picking all the overlapping is[i] entries */
370       ierr = ISLocalToGlobalMappingCreateIS(osm->lis,&ltog);CHKERRQ(ierr);
371       ierr = ISGetLocalSize(osm->is[i],&m);CHKERRQ(ierr);
372       ierr = ISGetIndices(osm->is[i], &idx_is);CHKERRQ(ierr);
373       ierr = PetscMalloc1(m,&idx_lis);CHKERRQ(ierr);
374       ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m,idx_is,&nout,idx_lis);CHKERRQ(ierr);
375       if (nout != m) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"is not a subset of lis");
376       ierr = ISRestoreIndices(osm->is[i], &idx_is);CHKERRQ(ierr);
377       ierr = ISCreateGeneral(PETSC_COMM_SELF,m,idx_lis,PETSC_OWN_POINTER,&isll);CHKERRQ(ierr);
378       ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
379       ierr = ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);CHKERRQ(ierr);
380       ierr = VecScatterCreate(osm->ly,isll,osm->y[i],isl,&osm->lrestriction[i]);CHKERRQ(ierr);
381       ierr = ISDestroy(&isll);CHKERRQ(ierr);
382       ierr = ISDestroy(&isl);CHKERRQ(ierr);
383       if (osm->lprolongation) { /* generate a scatter from y[i] to ly picking only the non-overlapping is_local[i] entries */
384         ISLocalToGlobalMapping ltog;
385         IS                     isll,isll_local;
386         const PetscInt         *idx_local;
387         PetscInt               *idx1, *idx2, nout;
388 
389         ierr = ISGetLocalSize(osm->is_local[i],&m_local);CHKERRQ(ierr);
390         ierr = ISGetIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr);
391 
392         ierr = ISLocalToGlobalMappingCreateIS(osm->is[i],&ltog);CHKERRQ(ierr);
393         ierr = PetscMalloc1(m_local,&idx1);CHKERRQ(ierr);
394         ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m_local,idx_local,&nout,idx1);CHKERRQ(ierr);
395         ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
396         if (nout != m_local) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"is_local not a subset of is");
397         ierr = ISCreateGeneral(PETSC_COMM_SELF,m_local,idx1,PETSC_OWN_POINTER,&isll);CHKERRQ(ierr);
398 
399         ierr = ISLocalToGlobalMappingCreateIS(osm->lis,&ltog);CHKERRQ(ierr);
400         ierr = PetscMalloc1(m_local,&idx2);CHKERRQ(ierr);
401         ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m_local,idx_local,&nout,idx2);CHKERRQ(ierr);
402         ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
403         if (nout != m_local) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"is_local not a subset of lis");
404         ierr = ISCreateGeneral(PETSC_COMM_SELF,m_local,idx2,PETSC_OWN_POINTER,&isll_local);CHKERRQ(ierr);
405 
406         ierr = ISRestoreIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr);
407         ierr = VecScatterCreate(osm->y[i],isll,osm->ly,isll_local,&osm->lprolongation[i]);CHKERRQ(ierr);
408 
409         ierr = ISDestroy(&isll);CHKERRQ(ierr);
410         ierr = ISDestroy(&isll_local);CHKERRQ(ierr);
411       }
412     }
413     ierr = VecDestroy(&vec);CHKERRQ(ierr);
414   }
415 
416   if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
417     IS      *cis;
418     PetscInt c;
419 
420     ierr = PetscMalloc1(osm->n_local_true, &cis);CHKERRQ(ierr);
421     for (c = 0; c < osm->n_local_true; ++c) cis[c] = osm->lis;
422     ierr = MatCreateSubMatrices(pc->pmat, osm->n_local_true, osm->is, cis, scall, &osm->lmats);CHKERRQ(ierr);
423     ierr = PetscFree(cis);CHKERRQ(ierr);
424   }
425 
426   /* Return control to the user so that the submatrices can be modified (e.g., to apply
427      different boundary conditions for the submatrices than for the global problem) */
428   ierr = PCModifySubMatrices(pc,osm->n_local_true,osm->is,osm->is,osm->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);
429 
430   /*
431      Loop over subdomains putting them into local ksp
432   */
433   for (i=0; i<osm->n_local_true; i++) {
434     ierr = KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i]);CHKERRQ(ierr);
435     if (!pc->setupcalled) {
436       ierr = KSPSetFromOptions(osm->ksp[i]);CHKERRQ(ierr);
437     }
438   }
439   PetscFunctionReturn(0);
440 }
441 
442 static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc)
443 {
444   PC_ASM             *osm = (PC_ASM*)pc->data;
445   PetscErrorCode     ierr;
446   PetscInt           i;
447   KSPConvergedReason reason;
448 
449   PetscFunctionBegin;
450   for (i=0; i<osm->n_local_true; i++) {
451     ierr = KSPSetUp(osm->ksp[i]);CHKERRQ(ierr);
452     ierr = KSPGetConvergedReason(osm->ksp[i],&reason);CHKERRQ(ierr);
453     if (reason == KSP_DIVERGED_PCSETUP_FAILED) {
454       pc->failedreason = PC_SUBPC_ERROR;
455     }
456   }
457   PetscFunctionReturn(0);
458 }
459 
460 static PetscErrorCode PCApply_ASM(PC pc,Vec x,Vec y)
461 {
462   PC_ASM         *osm = (PC_ASM*)pc->data;
463   PetscErrorCode ierr;
464   PetscInt       i,n_local_true = osm->n_local_true;
465   ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;
466 
467   PetscFunctionBegin;
468   /*
469      Support for limiting the restriction or interpolation to only local
470      subdomain values (leaving the other values 0).
471   */
472   if (!(osm->type & PC_ASM_RESTRICT)) {
473     forward = SCATTER_FORWARD_LOCAL;
474     /* have to zero the work RHS since scatter may leave some slots empty */
475     ierr = VecSet(osm->lx, 0.0);CHKERRQ(ierr);
476   }
477   if (!(osm->type & PC_ASM_INTERPOLATE)) {
478     reverse = SCATTER_REVERSE_LOCAL;
479   }
480 
481   if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE || osm->loctype == PC_COMPOSITE_ADDITIVE) {
482     /* zero the global and the local solutions */
483     ierr = VecZeroEntries(y);CHKERRQ(ierr);
484     ierr = VecSet(osm->ly, 0.0);CHKERRQ(ierr);
485 
486     /* Copy the global RHS to local RHS including the ghost nodes */
487     ierr = VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward);CHKERRQ(ierr);
488     ierr = VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward);CHKERRQ(ierr);
489 
490     /* Restrict local RHS to the overlapping 0-block RHS */
491     ierr = VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward);CHKERRQ(ierr);
492     ierr = VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0],  INSERT_VALUES, forward);CHKERRQ(ierr);
493 
494     /* do the local solves */
495     for (i = 0; i < n_local_true; ++i) {
496 
497       /* solve the overlapping i-block */
498       ierr = KSPSolve(osm->ksp[i], osm->x[i], osm->y[i]);CHKERRQ(ierr);
499 
500       if (osm->lprolongation) { /* interpolate the non-overlapping i-block solution to the local solution (only for restricted additive) */
501         ierr = VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward);CHKERRQ(ierr);
502         ierr = VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward);CHKERRQ(ierr);
503       }
504       else { /* interpolate the overlapping i-block solution to the local solution */
505         ierr = VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse);CHKERRQ(ierr);
506         ierr = VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse);CHKERRQ(ierr);
507       }
508 
509       if (i < n_local_true-1) {
510         /* Restrict local RHS to the overlapping (i+1)-block RHS */
511         ierr = VecScatterBegin(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward);CHKERRQ(ierr);
512         ierr = VecScatterEnd(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward);CHKERRQ(ierr);
513 
514         if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
515           /* update the overlapping (i+1)-block RHS using the current local solution */
516           ierr = MatMult(osm->lmats[i+1], osm->ly, osm->y[i+1]);CHKERRQ(ierr);
517           ierr = VecAXPBY(osm->x[i+1],-1.,1., osm->y[i+1]);CHKERRQ(ierr);
518         }
519       }
520     }
521     /* Add the local solution to the global solution including the ghost nodes */
522     ierr = VecScatterBegin(osm->restriction, osm->ly, y,  ADD_VALUES, reverse);CHKERRQ(ierr);
523     ierr = VecScatterEnd(osm->restriction,  osm->ly, y, ADD_VALUES, reverse);CHKERRQ(ierr);
524   } else {
525     SETERRQ1(PetscObjectComm((PetscObject) pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
526   }
527   PetscFunctionReturn(0);
528 }
529 
530 static PetscErrorCode PCApplyTranspose_ASM(PC pc,Vec x,Vec y)
531 {
532   PC_ASM         *osm = (PC_ASM*)pc->data;
533   PetscErrorCode ierr;
534   PetscInt       i,n_local_true = osm->n_local_true;
535   ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;
536 
537   PetscFunctionBegin;
538   /*
539      Support for limiting the restriction or interpolation to only local
540      subdomain values (leaving the other values 0).
541 
542      Note: these are reversed from the PCApply_ASM() because we are applying the
543      transpose of the three terms
544   */
545 
546   if (!(osm->type & PC_ASM_INTERPOLATE)) {
547     forward = SCATTER_FORWARD_LOCAL;
548     /* have to zero the work RHS since scatter may leave some slots empty */
549     ierr = VecSet(osm->lx, 0.0);CHKERRQ(ierr);
550   }
551   if (!(osm->type & PC_ASM_RESTRICT)) reverse = SCATTER_REVERSE_LOCAL;
552 
553   /* zero the global and the local solutions */
554   ierr = VecZeroEntries(y);CHKERRQ(ierr);
555   ierr = VecSet(osm->ly, 0.0);CHKERRQ(ierr);
556 
557   /* Copy the global RHS to local RHS including the ghost nodes */
558   ierr = VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward);CHKERRQ(ierr);
559   ierr = VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward);CHKERRQ(ierr);
560 
561   /* Restrict local RHS to the overlapping 0-block RHS */
562   ierr = VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward);CHKERRQ(ierr);
563   ierr = VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0],  INSERT_VALUES, forward);CHKERRQ(ierr);
564 
565   /* do the local solves */
566   for (i = 0; i < n_local_true; ++i) {
567 
568     /* solve the overlapping i-block */
569     ierr = KSPSolve(osm->ksp[i], osm->x[i], osm->y[i]);CHKERRQ(ierr);
570 
571     if (osm->lprolongation) { /* interpolate the non-overlapping i-block solution to the local solution */
572       ierr = VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward);CHKERRQ(ierr);
573       ierr = VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward);CHKERRQ(ierr);
574     }
575     else { /* interpolate the overlapping i-block solution to the local solution */
576       ierr = VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse);CHKERRQ(ierr);
577       ierr = VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse);CHKERRQ(ierr);
578     }
579 
580     if (i < n_local_true-1) {
581       /* Restrict local RHS to the overlapping (i+1)-block RHS */
582       ierr = VecScatterBegin(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward);CHKERRQ(ierr);
583       ierr = VecScatterEnd(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward);CHKERRQ(ierr);
584     }
585   }
586   /* Add the local solution to the global solution including the ghost nodes */
587   ierr = VecScatterBegin(osm->restriction, osm->ly, y,  ADD_VALUES, reverse);CHKERRQ(ierr);
588   ierr = VecScatterEnd(osm->restriction,  osm->ly, y, ADD_VALUES, reverse);CHKERRQ(ierr);
589 
590   PetscFunctionReturn(0);
591 
592 }
593 
594 static PetscErrorCode PCReset_ASM(PC pc)
595 {
596   PC_ASM         *osm = (PC_ASM*)pc->data;
597   PetscErrorCode ierr;
598   PetscInt       i;
599 
600   PetscFunctionBegin;
601   if (osm->ksp) {
602     for (i=0; i<osm->n_local_true; i++) {
603       ierr = KSPReset(osm->ksp[i]);CHKERRQ(ierr);
604     }
605   }
606   if (osm->pmat) {
607     if (osm->n_local_true > 0) {
608       ierr = MatDestroySubMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
609     }
610   }
611   if (osm->lrestriction) {
612     ierr = VecScatterDestroy(&osm->restriction);CHKERRQ(ierr);
613     for (i=0; i<osm->n_local_true; i++) {
614       ierr = VecScatterDestroy(&osm->lrestriction[i]);CHKERRQ(ierr);
615       if (osm->lprolongation) {ierr = VecScatterDestroy(&osm->lprolongation[i]);CHKERRQ(ierr);}
616       ierr = VecDestroy(&osm->x[i]);CHKERRQ(ierr);
617       ierr = VecDestroy(&osm->y[i]);CHKERRQ(ierr);
618     }
619     ierr = PetscFree(osm->lrestriction);CHKERRQ(ierr);
620     if (osm->lprolongation) {ierr = PetscFree(osm->lprolongation);CHKERRQ(ierr);}
621     ierr = PetscFree(osm->x);CHKERRQ(ierr);
622     ierr = PetscFree(osm->y);CHKERRQ(ierr);
623 
624   }
625   ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
626   ierr = ISDestroy(&osm->lis);CHKERRQ(ierr);
627   ierr = VecDestroy(&osm->lx);CHKERRQ(ierr);
628   ierr = VecDestroy(&osm->ly);CHKERRQ(ierr);
629   if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
630     ierr = MatDestroyMatrices(osm->n_local_true, &osm->lmats);CHKERRQ(ierr);
631   }
632 
633   ierr = PetscFree(osm->sub_mat_type);CHKERRQ(ierr);
634 
635   osm->is       = 0;
636   osm->is_local = 0;
637   PetscFunctionReturn(0);
638 }
639 
640 static PetscErrorCode PCDestroy_ASM(PC pc)
641 {
642   PC_ASM         *osm = (PC_ASM*)pc->data;
643   PetscErrorCode ierr;
644   PetscInt       i;
645 
646   PetscFunctionBegin;
647   ierr = PCReset_ASM(pc);CHKERRQ(ierr);
648   if (osm->ksp) {
649     for (i=0; i<osm->n_local_true; i++) {
650       ierr = KSPDestroy(&osm->ksp[i]);CHKERRQ(ierr);
651     }
652     ierr = PetscFree(osm->ksp);CHKERRQ(ierr);
653   }
654   ierr = PetscFree(pc->data);CHKERRQ(ierr);
655   PetscFunctionReturn(0);
656 }
657 
658 static PetscErrorCode PCSetFromOptions_ASM(PetscOptionItems *PetscOptionsObject,PC pc)
659 {
660   PC_ASM         *osm = (PC_ASM*)pc->data;
661   PetscErrorCode ierr;
662   PetscInt       blocks,ovl;
663   PetscBool      symset,flg;
664   PCASMType      asmtype;
665   PCCompositeType loctype;
666   char           sub_mat_type[256];
667 
668   PetscFunctionBegin;
669   /* set the type to symmetric if matrix is symmetric */
670   if (!osm->type_set && pc->pmat) {
671     ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
672     if (symset && flg) osm->type = PC_ASM_BASIC;
673   }
674   ierr = PetscOptionsHead(PetscOptionsObject,"Additive Schwarz options");CHKERRQ(ierr);
675   ierr = PetscOptionsBool("-pc_asm_dm_subdomains","Use DMCreateDomainDecomposition() to define subdomains","PCASMSetDMSubdomains",osm->dm_subdomains,&osm->dm_subdomains,&flg);CHKERRQ(ierr);
676   ierr = PetscOptionsInt("-pc_asm_blocks","Number of subdomains","PCASMSetTotalSubdomains",osm->n,&blocks,&flg);CHKERRQ(ierr);
677   if (flg) {
678     ierr = PCASMSetTotalSubdomains(pc,blocks,NULL,NULL);CHKERRQ(ierr);
679     osm->dm_subdomains = PETSC_FALSE;
680   }
681   ierr = PetscOptionsInt("-pc_asm_local_blocks","Number of local subdomains","PCASMSetLocalSubdomains",osm->n_local_true,&blocks,&flg);CHKERRQ(ierr);
682   if (flg) {
683     ierr = PCASMSetLocalSubdomains(pc,blocks,NULL,NULL);CHKERRQ(ierr);
684     osm->dm_subdomains = PETSC_FALSE;
685   }
686   ierr = PetscOptionsInt("-pc_asm_overlap","Number of grid points overlap","PCASMSetOverlap",osm->overlap,&ovl,&flg);CHKERRQ(ierr);
687   if (flg) {
688     ierr = PCASMSetOverlap(pc,ovl);CHKERRQ(ierr);
689     osm->dm_subdomains = PETSC_FALSE;
690   }
691   flg  = PETSC_FALSE;
692   ierr = PetscOptionsEnum("-pc_asm_type","Type of restriction/extension","PCASMSetType",PCASMTypes,(PetscEnum)osm->type,(PetscEnum*)&asmtype,&flg);CHKERRQ(ierr);
693   if (flg) {ierr = PCASMSetType(pc,asmtype);CHKERRQ(ierr); }
694   flg  = PETSC_FALSE;
695   ierr = PetscOptionsEnum("-pc_asm_local_type","Type of local solver composition","PCASMSetLocalType",PCCompositeTypes,(PetscEnum)osm->loctype,(PetscEnum*)&loctype,&flg);CHKERRQ(ierr);
696   if (flg) {ierr = PCASMSetLocalType(pc,loctype);CHKERRQ(ierr); }
697   ierr = PetscOptionsFList("-pc_asm_sub_mat_type","Subsolve Matrix Type","PCASMSetSubMatType",MatList,NULL,sub_mat_type,256,&flg);CHKERRQ(ierr);
698   if (flg) {
699     ierr = PCASMSetSubMatType(pc,sub_mat_type);CHKERRQ(ierr);
700   }
701   ierr = PetscOptionsTail();CHKERRQ(ierr);
702   PetscFunctionReturn(0);
703 }
704 
705 /*------------------------------------------------------------------------------------*/
706 
707 static PetscErrorCode  PCASMSetLocalSubdomains_ASM(PC pc,PetscInt n,IS is[],IS is_local[])
708 {
709   PC_ASM         *osm = (PC_ASM*)pc->data;
710   PetscErrorCode ierr;
711   PetscInt       i;
712 
713   PetscFunctionBegin;
714   if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 1 or more blocks, n = %D",n);
715   if (pc->setupcalled && (n != osm->n_local_true || is)) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetLocalSubdomains() should be called before calling PCSetUp().");
716 
717   if (!pc->setupcalled) {
718     if (is) {
719       for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is[i]);CHKERRQ(ierr);}
720     }
721     if (is_local) {
722       for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is_local[i]);CHKERRQ(ierr);}
723     }
724     ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
725 
726     osm->n_local_true = n;
727     osm->is           = 0;
728     osm->is_local     = 0;
729     if (is) {
730       ierr = PetscMalloc1(n,&osm->is);CHKERRQ(ierr);
731       for (i=0; i<n; i++) osm->is[i] = is[i];
732       /* Flag indicating that the user has set overlapping subdomains so PCASM should not increase their size. */
733       osm->overlap = -1;
734     }
735     if (is_local) {
736       ierr = PetscMalloc1(n,&osm->is_local);CHKERRQ(ierr);
737       for (i=0; i<n; i++) osm->is_local[i] = is_local[i];
738       if (!is) {
739         ierr = PetscMalloc1(osm->n_local_true,&osm->is);CHKERRQ(ierr);
740         for (i=0; i<osm->n_local_true; i++) {
741           if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
742             ierr = ISDuplicate(osm->is_local[i],&osm->is[i]);CHKERRQ(ierr);
743             ierr = ISCopy(osm->is_local[i],osm->is[i]);CHKERRQ(ierr);
744           } else {
745             ierr = PetscObjectReference((PetscObject)osm->is_local[i]);CHKERRQ(ierr);
746             osm->is[i] = osm->is_local[i];
747           }
748         }
749       }
750     }
751   }
752   PetscFunctionReturn(0);
753 }
754 
755 static PetscErrorCode  PCASMSetTotalSubdomains_ASM(PC pc,PetscInt N,IS *is,IS *is_local)
756 {
757   PC_ASM         *osm = (PC_ASM*)pc->data;
758   PetscErrorCode ierr;
759   PetscMPIInt    rank,size;
760   PetscInt       n;
761 
762   PetscFunctionBegin;
763   if (N < 1) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Number of total blocks must be > 0, N = %D",N);
764   if (is || is_local) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Use PCASMSetLocalSubdomains() to set specific index sets\n\tthey cannot be set globally yet.");
765 
766   /*
767      Split the subdomains equally among all processors
768   */
769   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
770   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);
771   n    = N/size + ((N % size) > rank);
772   if (!n) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Process %d must have at least one block: total processors %d total blocks %D",(int)rank,(int)size,N);
773   if (pc->setupcalled && n != osm->n_local_true) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"PCASMSetTotalSubdomains() should be called before PCSetUp().");
774   if (!pc->setupcalled) {
775     ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
776 
777     osm->n_local_true = n;
778     osm->is           = 0;
779     osm->is_local     = 0;
780   }
781   PetscFunctionReturn(0);
782 }
783 
784 static PetscErrorCode  PCASMSetOverlap_ASM(PC pc,PetscInt ovl)
785 {
786   PC_ASM *osm = (PC_ASM*)pc->data;
787 
788   PetscFunctionBegin;
789   if (ovl < 0) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
790   if (pc->setupcalled && ovl != osm->overlap) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetOverlap() should be called before PCSetUp().");
791   if (!pc->setupcalled) osm->overlap = ovl;
792   PetscFunctionReturn(0);
793 }
794 
795 static PetscErrorCode  PCASMSetType_ASM(PC pc,PCASMType type)
796 {
797   PC_ASM *osm = (PC_ASM*)pc->data;
798 
799   PetscFunctionBegin;
800   osm->type     = type;
801   osm->type_set = PETSC_TRUE;
802   PetscFunctionReturn(0);
803 }
804 
805 static PetscErrorCode  PCASMGetType_ASM(PC pc,PCASMType *type)
806 {
807   PC_ASM *osm = (PC_ASM*)pc->data;
808 
809   PetscFunctionBegin;
810   *type = osm->type;
811   PetscFunctionReturn(0);
812 }
813 
814 static PetscErrorCode  PCASMSetLocalType_ASM(PC pc, PCCompositeType type)
815 {
816   PC_ASM *osm = (PC_ASM *) pc->data;
817 
818   PetscFunctionBegin;
819   if (type != PC_COMPOSITE_ADDITIVE && type != PC_COMPOSITE_MULTIPLICATIVE) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Only supports additive or multiplicative as the local type");
820   osm->loctype = type;
821   PetscFunctionReturn(0);
822 }
823 
824 static PetscErrorCode  PCASMGetLocalType_ASM(PC pc, PCCompositeType *type)
825 {
826   PC_ASM *osm = (PC_ASM *) pc->data;
827 
828   PetscFunctionBegin;
829   *type = osm->loctype;
830   PetscFunctionReturn(0);
831 }
832 
833 static PetscErrorCode  PCASMSetSortIndices_ASM(PC pc,PetscBool  doSort)
834 {
835   PC_ASM *osm = (PC_ASM*)pc->data;
836 
837   PetscFunctionBegin;
838   osm->sort_indices = doSort;
839   PetscFunctionReturn(0);
840 }
841 
842 static PetscErrorCode  PCASMGetSubKSP_ASM(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
843 {
844   PC_ASM         *osm = (PC_ASM*)pc->data;
845   PetscErrorCode ierr;
846 
847   PetscFunctionBegin;
848   if (osm->n_local_true < 1) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ORDER,"Need to call PCSetUp() on PC (or KSPSetUp() on the outer KSP object) before calling here");
849 
850   if (n_local) *n_local = osm->n_local_true;
851   if (first_local) {
852     ierr          = MPI_Scan(&osm->n_local_true,first_local,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
853     *first_local -= osm->n_local_true;
854   }
855   if (ksp) {
856     /* Assume that local solves are now different; not necessarily
857        true though!  This flag is used only for PCView_ASM() */
858     *ksp                   = osm->ksp;
859     osm->same_local_solves = PETSC_FALSE;
860   }
861   PetscFunctionReturn(0);
862 }
863 
864 static PetscErrorCode  PCASMGetSubMatType_ASM(PC pc,MatType *sub_mat_type)
865 {
866   PC_ASM         *osm = (PC_ASM*)pc->data;
867 
868   PetscFunctionBegin;
869   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
870   PetscValidPointer(sub_mat_type,2);
871   *sub_mat_type = osm->sub_mat_type;
872   PetscFunctionReturn(0);
873 }
874 
875 static PetscErrorCode PCASMSetSubMatType_ASM(PC pc,MatType sub_mat_type)
876 {
877   PetscErrorCode    ierr;
878   PC_ASM            *osm = (PC_ASM*)pc->data;
879 
880   PetscFunctionBegin;
881   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
882   ierr = PetscFree(osm->sub_mat_type);CHKERRQ(ierr);
883   ierr = PetscStrallocpy(sub_mat_type,(char**)&osm->sub_mat_type);CHKERRQ(ierr);
884   PetscFunctionReturn(0);
885 }
886 
887 /*@C
888     PCASMSetLocalSubdomains - Sets the local subdomains (for this processor only) for the additive Schwarz preconditioner.
889 
890     Collective on PC
891 
892     Input Parameters:
893 +   pc - the preconditioner context
894 .   n - the number of subdomains for this processor (default value = 1)
895 .   is - the index set that defines the subdomains for this processor
896          (or NULL for PETSc to determine subdomains)
897 -   is_local - the index sets that define the local part of the subdomains for this processor, not used unless PCASMType is PC_ASM_RESTRICT
898          (or NULL to not provide these)
899 
900     Options Database Key:
901     To set the total number of subdomain blocks rather than specify the
902     index sets, use the option
903 .    -pc_asm_local_blocks <blks> - Sets local blocks
904 
905     Notes:
906     The IS numbering is in the parallel, global numbering of the vector for both is and is_local
907 
908     By default the ASM preconditioner uses 1 block per processor.
909 
910     Use PCASMSetTotalSubdomains() to set the subdomains for all processors.
911 
912     If is_local is provided and PCASMType is PC_ASM_RESTRICT then the solution only over the is_local region is interpolated
913     back to form the global solution (this is the standard restricted additive Schwarz method).
914 
915     If is_local is provided and PCASMType is PC_ASM_INTERPOLATE or PC_ASM_NONE then an error is generated since there is
916     no code to handle that case.
917 
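    Example usage (a sketch; is0 and is1 stand for index sets the application has already created, in the
    global numbering of the vector):
.vb
      IS is[2] = {is0, is1};
      PCSetType(pc, PCASM);
      PCASMSetLocalSubdomains(pc, 2, is, NULL);  /* two overlapping blocks on this process */
.ve
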
918     Level: advanced
919 
920 .keywords: PC, ASM, set, local, subdomains, additive Schwarz
921 
922 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
923           PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains(), PCASMType, PCASMSetType()
924 @*/
925 PetscErrorCode  PCASMSetLocalSubdomains(PC pc,PetscInt n,IS is[],IS is_local[])
926 {
927   PetscErrorCode ierr;
928 
929   PetscFunctionBegin;
930   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
931   ierr = PetscTryMethod(pc,"PCASMSetLocalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,n,is,is_local));CHKERRQ(ierr);
932   PetscFunctionReturn(0);
933 }
934 
935 /*@C
936     PCASMSetTotalSubdomains - Sets the subdomains for all processors for the
937     additive Schwarz preconditioner.  Either all or no processors in the
938     PC communicator must call this routine, with the same index sets.
939 
940     Collective on PC
941 
942     Input Parameters:
943 +   pc - the preconditioner context
944 .   N  - the number of subdomains for all processors
945 .   is - the index sets that define the subdomains for all processors
946          (or NULL to ask PETSc to determine the subdomains)
947 -   is_local - the index sets that define the local part of the subdomains for this processor
948          (or NULL to not provide this information)
949 
950     Options Database Key:
951     To set the total number of subdomain blocks rather than specify the
952     index sets, use the option
953 .    -pc_asm_blocks <blks> - Sets total blocks
954 
955     Notes:
956     Currently you cannot use this to set the actual subdomains with the argument is or is_local.
957 
958     By default the ASM preconditioner uses 1 block per processor.
959 
960     These index sets cannot be destroyed until after completion of the
961     linear solves for which the ASM preconditioner is being used.
962 
963     Use PCASMSetLocalSubdomains() to set local subdomains.
964 
965     The IS numbering is in the parallel, global numbering of the vector for both is and is_local
966 
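    Example usage (a sketch; the index set arguments are NULL because, as noted above, they cannot currently
    be set through this routine):
.vb
      PCSetType(pc, PCASM);
      PCASMSetTotalSubdomains(pc, 16, NULL, NULL);  /* 16 blocks split evenly across the processes */
.ve
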
967     Level: advanced
968 
969 .keywords: PC, ASM, set, total, global, subdomains, additive Schwarz
970 
971 .seealso: PCASMSetLocalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
972           PCASMCreateSubdomains2D()
973 @*/
974 PetscErrorCode  PCASMSetTotalSubdomains(PC pc,PetscInt N,IS is[],IS is_local[])
975 {
976   PetscErrorCode ierr;
977 
978   PetscFunctionBegin;
979   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
980   ierr = PetscTryMethod(pc,"PCASMSetTotalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,N,is,is_local));CHKERRQ(ierr);
981   PetscFunctionReturn(0);
982 }
983 
984 /*@
985     PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
986     additive Schwarz preconditioner.  Either all or no processors in the
987     PC communicator must call this routine.
988 
989     Logically Collective on PC
990 
991     Input Parameters:
992 +   pc  - the preconditioner context
993 -   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)
994 
995     Options Database Key:
996 .   -pc_asm_overlap <ovl> - Sets overlap
997 
998     Notes:
999     By default the ASM preconditioner uses 1 block per processor.  To use
1000     multiple blocks per processor, see PCASMSetTotalSubdomains() and
1001     PCASMSetLocalSubdomains() (and the option -pc_asm_blocks <blks>).
1002 
1003     The overlap defaults to 1, so if one desires that no additional
1004     overlap be computed beyond what may have been set with a call to
1005     PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(), then ovl
1006     must be set to be 0.  In particular, if one does not explicitly set
1007     the subdomains in an application code, then all overlap would be computed
1008     internally by PETSc, and using an overlap of 0 would result in an ASM
1009     variant that is equivalent to the block Jacobi preconditioner.
1010 
1011     The default algorithm used by PETSc to increase overlap is fast, but not scalable;
1012     use the option -mat_increase_overlap_scalable when the problem and number of processes are large.
1013 
1014     Note that one can define initial index sets with any overlap via
1015     PCASMSetLocalSubdomains(); the routine
1016     PCASMSetOverlap() merely allows PETSc to extend that overlap further
1017     if desired.
1018 
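    Example usage (a sketch; with the default one-block-per-process subdomains, an overlap of 0 gives the
    block Jacobi behavior described above):
.vb
      PCSetType(pc, PCASM);
      PCASMSetOverlap(pc, 0);
.ve
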
1019     Level: intermediate
1020 
1021 .keywords: PC, ASM, set, overlap
1022 
1023 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
1024           PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains(), MatIncreaseOverlap()
1025 @*/
1026 PetscErrorCode  PCASMSetOverlap(PC pc,PetscInt ovl)
1027 {
1028   PetscErrorCode ierr;
1029 
1030   PetscFunctionBegin;
1031   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1032   PetscValidLogicalCollectiveInt(pc,ovl,2);
1033   ierr = PetscTryMethod(pc,"PCASMSetOverlap_C",(PC,PetscInt),(pc,ovl));CHKERRQ(ierr);
1034   PetscFunctionReturn(0);
1035 }
1036 
1037 /*@
1038     PCASMSetType - Sets the type of restriction and interpolation used
1039     for local problems in the additive Schwarz method.
1040 
1041     Logically Collective on PC
1042 
1043     Input Parameters:
1044 +   pc  - the preconditioner context
1045 -   type - variant of ASM, one of
1046 .vb
1047       PC_ASM_BASIC       - full interpolation and restriction
1048       PC_ASM_RESTRICT    - full restriction, local processor interpolation
1049       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
1050       PC_ASM_NONE        - local processor restriction and interpolation
1051 .ve
1052 
1053     Options Database Key:
1054 .   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
1055 
1056     Notes: If the is_local arguments are passed to PCASMSetLocalSubdomains() then they are used when PC_ASM_RESTRICT has been selected
1057     to limit the local processor interpolation.
1058 
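    Example usage (a sketch; switches from the default restricted variant to the basic variant):
.vb
      PCASMSetType(pc, PC_ASM_BASIC);
.ve
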
1059     Level: intermediate
1060 
1061 .keywords: PC, ASM, set, type
1062 
1063 .seealso: PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
1064           PCASMCreateSubdomains2D(), PCASMType, PCASMSetLocalType(), PCASMGetLocalType()
1065 @*/
1066 PetscErrorCode  PCASMSetType(PC pc,PCASMType type)
1067 {
1068   PetscErrorCode ierr;
1069 
1070   PetscFunctionBegin;
1071   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1072   PetscValidLogicalCollectiveEnum(pc,type,2);
1073   ierr = PetscTryMethod(pc,"PCASMSetType_C",(PC,PCASMType),(pc,type));CHKERRQ(ierr);
1074   PetscFunctionReturn(0);
1075 }
1076 
1077 /*@
1078     PCASMGetType - Gets the type of restriction and interpolation used
1079     for local problems in the additive Schwarz method.
1080 
1081     Logically Collective on PC
1082 
1083     Input Parameter:
1084 .   pc  - the preconditioner context
1085 
1086     Output Parameter:
1087 .   type - variant of ASM, one of
1088 
1089 .vb
1090       PC_ASM_BASIC       - full interpolation and restriction
1091       PC_ASM_RESTRICT    - full restriction, local processor interpolation
1092       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
1093       PC_ASM_NONE        - local processor restriction and interpolation
1094 .ve
1095 
1096     Options Database Key:
1097 .   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
1098 
1099     Level: intermediate
1100 
1101 .keywords: PC, ASM, set, type
1102 
1103 .seealso: PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
1104           PCASMCreateSubdomains2D(), PCASMType, PCASMSetType(), PCASMSetLocalType(), PCASMGetLocalType()
1105 @*/
1106 PetscErrorCode  PCASMGetType(PC pc,PCASMType *type)
1107 {
1108   PetscErrorCode ierr;
1109 
1110   PetscFunctionBegin;
1111   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1112   ierr = PetscUseMethod(pc,"PCASMGetType_C",(PC,PCASMType*),(pc,type));CHKERRQ(ierr);
1113   PetscFunctionReturn(0);
1114 }
1115 
1116 /*@
1117   PCASMSetLocalType - Sets the type of composition used for local problems in the additive Schwarz method.
1118 
1119   Logically Collective on PC
1120 
1121   Input Parameters:
1122 + pc  - the preconditioner context
1123 - type - type of composition, one of
1124 .vb
1125   PC_COMPOSITE_ADDITIVE       - local additive combination
1126   PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
1127 .ve
1128 
1129   Options Database Key:
1130 . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type
1131 
1132   Level: intermediate
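  Example usage (a sketch; selects multiplicative combination of the local subdomain solves):
.vb
  PCASMSetLocalType(pc, PC_COMPOSITE_MULTIPLICATIVE);
.ve
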
1133 
1134 .seealso: PCASMSetType(), PCASMGetType(), PCASMGetLocalType(), PCASM, PCASMType, PCASMSetType(), PCASMGetType(), PCCompositeType
1135 @*/
1136 PetscErrorCode PCASMSetLocalType(PC pc, PCCompositeType type)
1137 {
1138   PetscErrorCode ierr;
1139 
1140   PetscFunctionBegin;
1141   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1142   PetscValidLogicalCollectiveEnum(pc, type, 2);
1143   ierr = PetscTryMethod(pc, "PCASMSetLocalType_C", (PC, PCCompositeType), (pc, type));CHKERRQ(ierr);
1144   PetscFunctionReturn(0);
1145 }
1146 
1147 /*@
1148   PCASMGetLocalType - Gets the type of composition used for local problems in the additive Schwarz method.
1149 
1150   Logically Collective on PC
1151 
1152   Input Parameter:
1153 . pc  - the preconditioner context
1154 
1155   Output Parameter:
1156 . type - type of composition, one of
1157 .vb
1158   PC_COMPOSITE_ADDITIVE       - local additive combination
1159   PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
1160 .ve
1161 
1162   Options Database Key:
1163 . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type
1164 
1165   Level: intermediate
1166 
1167 .seealso: PCASMSetType(), PCASMGetType(), PCASMSetLocalType(), PCASMCreate(), PCASMType, PCASMSetType(), PCASMGetType(), PCCompositeType
1168 @*/
1169 PetscErrorCode PCASMGetLocalType(PC pc, PCCompositeType *type)
1170 {
1171   PetscErrorCode ierr;
1172 
1173   PetscFunctionBegin;
1174   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1175   PetscValidPointer(type, 2);
1176   ierr = PetscUseMethod(pc, "PCASMGetLocalType_C", (PC, PCCompositeType *), (pc, type));CHKERRQ(ierr);
1177   PetscFunctionReturn(0);
1178 }
1179 
1180 /*@
1181     PCASMSetSortIndices - Determines whether subdomain indices are sorted.
1182 
1183     Logically Collective on PC
1184 
1185     Input Parameters:
1186 +   pc  - the preconditioner context
1187 -   doSort - sort the subdomain indices
1188 
1189     Level: intermediate
1190 
1191 .keywords: PC, ASM, set, type
1192 
1193 .seealso: PCASMSetLocalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
1194           PCASMCreateSubdomains2D()
1195 @*/
1196 PetscErrorCode  PCASMSetSortIndices(PC pc,PetscBool doSort)
1197 {
1198   PetscErrorCode ierr;
1199 
1200   PetscFunctionBegin;
1201   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1202   PetscValidLogicalCollectiveBool(pc,doSort,2);
1203   ierr = PetscTryMethod(pc,"PCASMSetSortIndices_C",(PC,PetscBool),(pc,doSort));CHKERRQ(ierr);
1204   PetscFunctionReturn(0);
1205 }
1206 
1207 /*@C
1208    PCASMGetSubKSP - Gets the local KSP contexts for all blocks on
1209    this processor.
1210 
1211    Collective on PC iff first_local is requested
1212 
1213    Input Parameter:
1214 .  pc - the preconditioner context
1215 
1216    Output Parameters:
1217 +  n_local - the number of blocks on this processor or NULL
1218 .  first_local - the global number of the first block on this processor or NULL,
1219                  all processors must request or all must pass NULL
1220 -  ksp - the array of KSP contexts
1221 
1222    Note:
1223    After PCASMGetSubKSP() the array of KSP contexts is not to be freed.
1224 
1225    You must call KSPSetUp() before calling PCASMGetSubKSP().
1226 
1227    Fortran note:
1228    The output argument 'ksp' must be an array of sufficient length or PETSC_NULL_KSP. The latter can be used to learn the necessary length.
1229 
1230    Level: advanced
1231 
1232 .keywords: PC, ASM, additive Schwarz, get, sub, KSP, context
1233 
1234 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(),
1235           PCASMCreateSubdomains2D()
1236 @*/
1237 PetscErrorCode  PCASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
1238 {
1239   PetscErrorCode ierr;
1240 
1241   PetscFunctionBegin;
1242   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1243   ierr = PetscUseMethod(pc,"PCASMGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));CHKERRQ(ierr);
1244   PetscFunctionReturn(0);
1245 }
1246 
1247 /* -------------------------------------------------------------------------------------*/
1248 /*MC
1249    PCASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
1250            its own KSP object.
1251 
1252    Options Database Keys:
1253 +  -pc_asm_blocks <blks> - Sets total blocks
1254 .  -pc_asm_overlap <ovl> - Sets overlap
1255 .  -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type, default is restrict
1256 -  -pc_asm_local_type [additive,multiplicative] - Sets the local solver composition type, default is additive
1257 
1258      IMPORTANT: If you run with, for example, 3 blocks on 1 processor versus 3 blocks on 3 processors you
1259       will get different convergence rates due to the default option of -pc_asm_type restrict. Use
1260       -pc_asm_type basic to use the standard ASM.
1261 
1262    Notes: Each processor can have one or more blocks, but a block cannot be shared by more
1263      than one processor. Use PCGASM for subdomains shared by multiple processes. Defaults to one block per processor.
1264 
1265      To set options on the solvers for each block append -sub_ to all the KSP, and PC
1266         options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
1267 
1268      To set the options on the solvers separate for each block call PCASMGetSubKSP()
1269          and set the options directly on the resulting KSP object (you can access its PC
1270          with KSPGetPC())
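
     A typical use from the code is sketched below (a minimal sketch; it assumes the KSP ksp and the vectors b and x already exist):
.vb
     PC pc;

     ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
     ierr = PCSetType(pc,PCASM);CHKERRQ(ierr);
     ierr = PCASMSetOverlap(pc,2);CHKERRQ(ierr);          /* two layers of overlap */
     ierr = PCASMSetType(pc,PC_ASM_BASIC);CHKERRQ(ierr);  /* standard ASM instead of the restricted default */
     ierr = KSPSetFromOptions(ksp);CHKERRQ(ierr);
     ierr = KSPSolve(ksp,b,x);CHKERRQ(ierr);
.ve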
1271 
1272    Level: beginner
1273 
1274    Concepts: additive Schwarz method
1275 
1276     References:
1277 +   1. - M Dryja, OB Widlund, An additive variant of the Schwarz alternating method for the case of many subregions
1278      Courant Institute, New York University Technical report
1279 -   2. - Barry Smith, Petter Bjorstad, and William Gropp, Domain Decompositions: Parallel Multilevel Methods for Elliptic Partial Differential Equations,
1280     Cambridge University Press.
1281 
1282 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
1283            PCBJACOBI, PCASMGetSubKSP(), PCASMSetLocalSubdomains(), PCASMType, PCASMGetType(), PCASMSetLocalType(), PCASMGetLocalType()
1284            PCASMSetTotalSubdomains(), PCSetModifySubmatrices(), PCASMSetOverlap(), PCASMSetType(), PCCompositeType
1285 
1286 M*/
1287 
1288 PETSC_EXTERN PetscErrorCode PCCreate_ASM(PC pc)
1289 {
1290   PetscErrorCode ierr;
1291   PC_ASM         *osm;
1292 
1293   PetscFunctionBegin;
1294   ierr = PetscNewLog(pc,&osm);CHKERRQ(ierr);
1295 
1296   osm->n                 = PETSC_DECIDE;
1297   osm->n_local           = 0;
1298   osm->n_local_true      = PETSC_DECIDE;
1299   osm->overlap           = 1;
1300   osm->ksp               = 0;
1301   osm->restriction       = 0;
1302   osm->lprolongation     = 0;
1303   osm->lrestriction      = 0;
1304   osm->x                 = 0;
1305   osm->y                 = 0;
1306   osm->is                = 0;
1307   osm->is_local          = 0;
1308   osm->mat               = 0;
1309   osm->pmat              = 0;
1310   osm->type              = PC_ASM_RESTRICT;
1311   osm->loctype           = PC_COMPOSITE_ADDITIVE;
1312   osm->same_local_solves = PETSC_TRUE;
1313   osm->sort_indices      = PETSC_TRUE;
1314   osm->dm_subdomains     = PETSC_FALSE;
1315   osm->sub_mat_type      = NULL;
1316 
1317   pc->data                 = (void*)osm;
1318   pc->ops->apply           = PCApply_ASM;
1319   pc->ops->applytranspose  = PCApplyTranspose_ASM;
1320   pc->ops->setup           = PCSetUp_ASM;
1321   pc->ops->reset           = PCReset_ASM;
1322   pc->ops->destroy         = PCDestroy_ASM;
1323   pc->ops->setfromoptions  = PCSetFromOptions_ASM;
1324   pc->ops->setuponblocks   = PCSetUpOnBlocks_ASM;
1325   pc->ops->view            = PCView_ASM;
1326   pc->ops->applyrichardson = 0;
1327 
1328   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalSubdomains_C",PCASMSetLocalSubdomains_ASM);CHKERRQ(ierr);
1329   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetTotalSubdomains_C",PCASMSetTotalSubdomains_ASM);CHKERRQ(ierr);
1330   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetOverlap_C",PCASMSetOverlap_ASM);CHKERRQ(ierr);
1331   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetType_C",PCASMSetType_ASM);CHKERRQ(ierr);
1332   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetType_C",PCASMGetType_ASM);CHKERRQ(ierr);
1333   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalType_C",PCASMSetLocalType_ASM);CHKERRQ(ierr);
1334   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetLocalType_C",PCASMGetLocalType_ASM);CHKERRQ(ierr);
1335   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSortIndices_C",PCASMSetSortIndices_ASM);CHKERRQ(ierr);
1336   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubKSP_C",PCASMGetSubKSP_ASM);CHKERRQ(ierr);
1337   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubMatType_C",PCASMGetSubMatType_ASM);CHKERRQ(ierr);
1338   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSubMatType_C",PCASMSetSubMatType_ASM);CHKERRQ(ierr);
1339   PetscFunctionReturn(0);
1340 }
1341 
1342 /*@C
1343    PCASMCreateSubdomains - Creates the index sets for the overlapping Schwarz
1344    preconditioner for any problem on a general grid.
1345 
1346    Collective
1347 
1348    Input Parameters:
1349 +  A - The global matrix operator
1350 -  n - the number of local blocks
1351 
1352    Output Parameter:
1353 .  outis - the array of index sets defining the subdomains
1354 
1355    Level: advanced
1356 
1357    Note: this generates nonoverlapping subdomains; PCASM will generate the overlap
1358     from these if you use PCASMSetLocalSubdomains().
1359 
1360     In the Fortran version you must provide the array outis[], already allocated with length n.
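
   A typical usage pattern is sketched below (a minimal sketch; it assumes pc is a PCASM preconditioner and A is the matrix from which the preconditioner is built):
.vb
   IS       *is;
   PetscInt nblocks = 4;                    /* four blocks on this process */

   ierr = PCASMCreateSubdomains(A,nblocks,&is);CHKERRQ(ierr);
   ierr = PCASMSetLocalSubdomains(pc,nblocks,is,NULL);CHKERRQ(ierr);
   ierr = PCASMDestroySubdomains(nblocks,is,NULL);CHKERRQ(ierr);
.ve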
1361 
1362 .keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid
1363 
1364 .seealso: PCASMSetLocalSubdomains(), PCASMDestroySubdomains()
1365 @*/
1366 PetscErrorCode  PCASMCreateSubdomains(Mat A, PetscInt n, IS* outis[])
1367 {
1368   MatPartitioning mpart;
1369   const char      *prefix;
1370   void            (*f)(void);
1371   PetscInt        i,j,rstart,rend,bs;
1372   PetscBool       isbaij = PETSC_FALSE,foundpart = PETSC_FALSE;
1373   Mat             Ad     = NULL, adj;
1374   IS              ispart,isnumb,*is;
1375   PetscErrorCode  ierr;
1376 
1377   PetscFunctionBegin;
1378   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
1379   PetscValidPointer(outis,3);
1380   if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of local blocks must be > 0, n = %D",n);
1381 
1382   /* Get prefix, row distribution, and block size */
1383   ierr = MatGetOptionsPrefix(A,&prefix);CHKERRQ(ierr);
1384   ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
1385   ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
1386   if (rstart/bs*bs != rstart || rend/bs*bs != rend) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"bad row distribution [%D,%D) for matrix block size %D",rstart,rend,bs);
1387 
1388   /* Get diagonal block from matrix if possible */
1389   ierr = MatShellGetOperation(A,MATOP_GET_DIAGONAL_BLOCK,&f);CHKERRQ(ierr);
1390   if (f) {
1391     ierr = MatGetDiagonalBlock(A,&Ad);CHKERRQ(ierr);
1392   }
1393   if (Ad) {
1394     ierr = PetscObjectTypeCompare((PetscObject)Ad,MATSEQBAIJ,&isbaij);CHKERRQ(ierr);
1395     if (!isbaij) {ierr = PetscObjectTypeCompare((PetscObject)Ad,MATSEQSBAIJ,&isbaij);CHKERRQ(ierr);}
1396   }
1397   if (Ad && n > 1) {
1398     PetscBool match,done;
1399     /* Try to setup a good matrix partitioning if available */
1400     ierr = MatPartitioningCreate(PETSC_COMM_SELF,&mpart);CHKERRQ(ierr);
1401     ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
1402     ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr);
1403     ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGCURRENT,&match);CHKERRQ(ierr);
1404     if (!match) {
1405       ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGSQUARE,&match);CHKERRQ(ierr);
1406     }
1407     if (!match) { /* assume a "good" partitioner is available */
1408       PetscInt       na;
1409       const PetscInt *ia,*ja;
1410       ierr = MatGetRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
1411       if (done) {
1412         /* Build adjacency matrix by hand. Unfortunately a call to
1413            MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
1414            remove the block-aij structure and we cannot expect
1415            MatPartitioning to split vertices as we need */
1416         PetscInt       i,j,len,nnz,cnt,*iia=0,*jja=0;
1417         const PetscInt *row;
1418         nnz = 0;
1419         for (i=0; i<na; i++) { /* count number of nonzeros */
1420           len = ia[i+1] - ia[i];
1421           row = ja + ia[i];
1422           for (j=0; j<len; j++) {
1423             if (row[j] == i) { /* don't count diagonal */
1424               len--; break;
1425             }
1426           }
1427           nnz += len;
1428         }
1429         ierr   = PetscMalloc1(na+1,&iia);CHKERRQ(ierr);
1430         ierr   = PetscMalloc1(nnz,&jja);CHKERRQ(ierr);
1431         nnz    = 0;
1432         iia[0] = 0;
1433         for (i=0; i<na; i++) { /* fill adjacency */
1434           cnt = 0;
1435           len = ia[i+1] - ia[i];
1436           row = ja + ia[i];
1437           for (j=0; j<len; j++) {
1438             if (row[j] != i) { /* if not diagonal */
1439               jja[nnz+cnt++] = row[j];
1440             }
1441           }
1442           nnz     += cnt;
1443           iia[i+1] = nnz;
1444         }
1445         /* Partitioning of the adjacency matrix */
1446         ierr      = MatCreateMPIAdj(PETSC_COMM_SELF,na,na,iia,jja,NULL,&adj);CHKERRQ(ierr);
1447         ierr      = MatPartitioningSetAdjacency(mpart,adj);CHKERRQ(ierr);
1448         ierr      = MatPartitioningSetNParts(mpart,n);CHKERRQ(ierr);
1449         ierr      = MatPartitioningApply(mpart,&ispart);CHKERRQ(ierr);
1450         ierr      = ISPartitioningToNumbering(ispart,&isnumb);CHKERRQ(ierr);
1451         ierr      = MatDestroy(&adj);CHKERRQ(ierr);
1452         foundpart = PETSC_TRUE;
1453       }
1454       ierr = MatRestoreRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
1455     }
1456     ierr = MatPartitioningDestroy(&mpart);CHKERRQ(ierr);
1457   }
1458 
1459   ierr   = PetscMalloc1(n,&is);CHKERRQ(ierr);
1460   *outis = is;
1461 
1462   if (!foundpart) {
1463 
1464     /* Partitioning by contiguous chunks of rows */
1465 
1466     PetscInt mbs   = (rend-rstart)/bs;
1467     PetscInt start = rstart;
1468     for (i=0; i<n; i++) {
1469       PetscInt count = (mbs/n + ((mbs % n) > i)) * bs;
1470       ierr   = ISCreateStride(PETSC_COMM_SELF,count,start,1,&is[i]);CHKERRQ(ierr);
1471       start += count;
1472     }
1473 
1474   } else {
1475 
1476     /* Partitioning by adjacency of diagonal block  */
1477 
1478     const PetscInt *numbering;
1479     PetscInt       *count,nidx,*indices,*newidx,start=0;
1480     /* Get node count in each partition */
1481     ierr = PetscMalloc1(n,&count);CHKERRQ(ierr);
1482     ierr = ISPartitioningCount(ispart,n,count);CHKERRQ(ierr);
1483     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1484       for (i=0; i<n; i++) count[i] *= bs;
1485     }
1486     /* Build indices from node numbering */
1487     ierr = ISGetLocalSize(isnumb,&nidx);CHKERRQ(ierr);
1488     ierr = PetscMalloc1(nidx,&indices);CHKERRQ(ierr);
1489     for (i=0; i<nidx; i++) indices[i] = i; /* needs to be initialized */
1490     ierr = ISGetIndices(isnumb,&numbering);CHKERRQ(ierr);
1491     ierr = PetscSortIntWithPermutation(nidx,numbering,indices);CHKERRQ(ierr);
1492     ierr = ISRestoreIndices(isnumb,&numbering);CHKERRQ(ierr);
1493     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1494       ierr = PetscMalloc1(nidx*bs,&newidx);CHKERRQ(ierr);
1495       for (i=0; i<nidx; i++) {
1496         for (j=0; j<bs; j++) newidx[i*bs+j] = indices[i]*bs + j;
1497       }
1498       ierr    = PetscFree(indices);CHKERRQ(ierr);
1499       nidx   *= bs;
1500       indices = newidx;
1501     }
1502     /* Shift to get global indices */
1503     for (i=0; i<nidx; i++) indices[i] += rstart;
1504 
1505     /* Build the index sets for each block */
1506     for (i=0; i<n; i++) {
1507       ierr   = ISCreateGeneral(PETSC_COMM_SELF,count[i],&indices[start],PETSC_COPY_VALUES,&is[i]);CHKERRQ(ierr);
1508       ierr   = ISSort(is[i]);CHKERRQ(ierr);
1509       start += count[i];
1510     }
1511 
1512     ierr = PetscFree(count);CHKERRQ(ierr);
1513     ierr = PetscFree(indices);CHKERRQ(ierr);
1514     ierr = ISDestroy(&isnumb);CHKERRQ(ierr);
1515     ierr = ISDestroy(&ispart);CHKERRQ(ierr);
1516 
1517   }
1518   PetscFunctionReturn(0);
1519 }
1520 
1521 /*@C
1522    PCASMDestroySubdomains - Destroys the index sets created with
1523    PCASMCreateSubdomains(). Should be called after setting subdomains
1524    with PCASMSetLocalSubdomains().
1525 
1526    Collective
1527 
1528    Input Parameters:
1529 +  n - the number of index sets
1530 .  is - the array of index sets
1531 -  is_local - the array of local index sets, can be NULL
1532 
1533    Level: advanced
1534 
1535 .keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid
1536 
1537 .seealso: PCASMCreateSubdomains(), PCASMSetLocalSubdomains()
1538 @*/
1539 PetscErrorCode  PCASMDestroySubdomains(PetscInt n, IS is[], IS is_local[])
1540 {
1541   PetscInt       i;
1542   PetscErrorCode ierr;
1543 
1544   PetscFunctionBegin;
1545   if (n <= 0) PetscFunctionReturn(0);
1546   if (is) {
1547     PetscValidPointer(is,2);
1548     for (i=0; i<n; i++) { ierr = ISDestroy(&is[i]);CHKERRQ(ierr); }
1549     ierr = PetscFree(is);CHKERRQ(ierr);
1550   }
1551   if (is_local) {
1552     PetscValidPointer(is_local,3);
1553     for (i=0; i<n; i++) { ierr = ISDestroy(&is_local[i]);CHKERRQ(ierr); }
1554     ierr = PetscFree(is_local);CHKERRQ(ierr);
1555   }
1556   PetscFunctionReturn(0);
1557 }
1558 
1559 /*@
1560    PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
1561    preconditioner for a two-dimensional problem on a regular grid.
1562 
1563    Not Collective
1564 
1565    Input Parameters:
1566 +  m, n - the number of mesh points in the x and y directions
1567 .  M, N - the number of subdomains in the x and y directions
1568 .  dof - degrees of freedom per node
1569 -  overlap - overlap in mesh lines
1570 
1571    Output Parameters:
1572 +  Nsub - the number of subdomains created
1573 .  is - array of index sets defining overlapping (if overlap > 0) subdomains
1574 -  is_local - array of index sets defining non-overlapping subdomains
1575 
1576    Note:
1577    Presently PCASMCreateSubdomains2D() is valid only for sequential
1578    preconditioners.  More general related routines are
1579    PCASMSetTotalSubdomains() and PCASMSetLocalSubdomains().
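
   For example, a 64 x 64 mesh split into a 4 x 4 array of subdomains with one mesh line of overlap could be set up as sketched below (a minimal sketch; it assumes a sequential PCASM preconditioner pc):
.vb
   IS       *is,*is_local;
   PetscInt Nsub;

   ierr = PCASMCreateSubdomains2D(64,64,4,4,1,1,&Nsub,&is,&is_local);CHKERRQ(ierr);
   ierr = PCASMSetLocalSubdomains(pc,Nsub,is,is_local);CHKERRQ(ierr);
   ierr = PCASMDestroySubdomains(Nsub,is,is_local);CHKERRQ(ierr);
.ve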
1580 
1581    Level: advanced
1582 
1583 .keywords: PC, ASM, additive Schwarz, create, subdomains, 2D, regular grid
1584 
1585 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
1586           PCASMSetOverlap()
1587 @*/
1588 PetscErrorCode  PCASMCreateSubdomains2D(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt dof,PetscInt overlap,PetscInt *Nsub,IS **is,IS **is_local)
1589 {
1590   PetscInt       i,j,height,width,ystart,xstart,yleft,yright,xleft,xright,loc_outer;
1591   PetscErrorCode ierr;
1592   PetscInt       nidx,*idx,loc,ii,jj,count;
1593 
1594   PetscFunctionBegin;
1595   if (dof != 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Support for dof > 1 not yet implemented");
1596 
1597   *Nsub     = N*M;
1598   ierr      = PetscMalloc1(*Nsub,is);CHKERRQ(ierr);
1599   ierr      = PetscMalloc1(*Nsub,is_local);CHKERRQ(ierr);
1600   ystart    = 0;
1601   loc_outer = 0;
1602   for (i=0; i<N; i++) {
1603     height = n/N + ((n % N) > i); /* height of subdomain */
1604     if (height < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many N subdomains for mesh dimension n");
1605     yleft  = ystart - overlap; if (yleft < 0) yleft = 0;
1606     yright = ystart + height + overlap; if (yright > n) yright = n;
1607     xstart = 0;
1608     for (j=0; j<M; j++) {
1609       width = m/M + ((m % M) > j); /* width of subdomain */
1610       if (width < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many M subdomains for mesh dimension m");
1611       xleft  = xstart - overlap; if (xleft < 0) xleft = 0;
1612       xright = xstart + width + overlap; if (xright > m) xright = m;
1613       nidx   = (xright - xleft)*(yright - yleft);
1614       ierr   = PetscMalloc1(nidx,&idx);CHKERRQ(ierr);
1615       loc    = 0;
1616       for (ii=yleft; ii<yright; ii++) {
1617         count = m*ii + xleft;
1618         for (jj=xleft; jj<xright; jj++) idx[loc++] = count++;
1619       }
1620       ierr = ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,PETSC_COPY_VALUES,(*is)+loc_outer);CHKERRQ(ierr);
1621       if (overlap == 0) {
1622         ierr = PetscObjectReference((PetscObject)(*is)[loc_outer]);CHKERRQ(ierr);
1623 
1624         (*is_local)[loc_outer] = (*is)[loc_outer];
1625       } else {
1626         for (loc=0,ii=ystart; ii<ystart+height; ii++) {
1627           for (jj=xstart; jj<xstart+width; jj++) {
1628             idx[loc++] = m*ii + jj;
1629           }
1630         }
1631         ierr = ISCreateGeneral(PETSC_COMM_SELF,loc,idx,PETSC_COPY_VALUES,*is_local+loc_outer);CHKERRQ(ierr);
1632       }
1633       ierr    = PetscFree(idx);CHKERRQ(ierr);
1634       xstart += width;
1635       loc_outer++;
1636     }
1637     ystart += height;
1638   }
1639   for (i=0; i<*Nsub; i++) { ierr = ISSort((*is)[i]);CHKERRQ(ierr); }
1640   PetscFunctionReturn(0);
1641 }
1642 
1643 /*@C
1644     PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
1645     only) for the additive Schwarz preconditioner.
1646 
1647     Not Collective
1648 
1649     Input Parameter:
1650 .   pc - the preconditioner context
1651 
1652     Output Parameters:
1653 +   n - the number of subdomains for this processor (default value = 1)
1654 .   is - the index sets that define the subdomains for this processor
1655 -   is_local - the index sets that define the local part of the subdomains for this processor (can be NULL)
1656 
1657 
1658     Notes:
1659     The IS numbering is in the parallel, global numbering of the vector.
1660 
1661     Level: advanced
1662 
1663 .keywords: PC, ASM, set, local, subdomains, additive Schwarz
1664 
1665 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1666           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubmatrices()
1667 @*/
1668 PetscErrorCode  PCASMGetLocalSubdomains(PC pc,PetscInt *n,IS *is[],IS *is_local[])
1669 {
1670   PC_ASM         *osm = (PC_ASM*)pc->data;
1671   PetscErrorCode ierr;
1672   PetscBool      match;
1673 
1674   PetscFunctionBegin;
1675   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1676   PetscValidIntPointer(n,2);
1677   if (is) PetscValidPointer(is,3);
1678   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1679   if (!match) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"PC is not a PCASM");
1680   if (n) *n = osm->n_local_true;
1681   if (is) *is = osm->is;
1682   if (is_local) *is_local = osm->is_local;
1683   PetscFunctionReturn(0);
1684 }
1685 
1686 /*@C
1687     PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1688     only) for the additive Schwarz preconditioner.
1689 
1690     Not Collective
1691 
1692     Input Parameter:
1693 .   pc - the preconditioner context
1694 
1695     Output Parameters:
1696 +   n - the number of matrices for this processor (default value = 1)
1697 -   mat - the matrices
1698 
1699     Level: advanced
1700 
1701     Notes: Call after PCSetUp() (or KSPSetUp()) but before PCApply() (or KSPSolve()) and before PCSetUpOnBlocks()
1702 
1703            Usually one would use PCSetModifySubmatrices() to change the submatrices in building the preconditioner.
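
    A minimal sketch of inspecting the submatrices after setup (it assumes the KSP ksp uses a PCASM preconditioner):
.vb
    Mat      *mats;
    PC       pc;
    PetscInt i,nlocal;

    ierr = KSPSetUp(ksp);CHKERRQ(ierr);
    ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
    ierr = PCASMGetLocalSubmatrices(pc,&nlocal,&mats);CHKERRQ(ierr);
    for (i=0; i<nlocal; i++) {
      ierr = MatView(mats[i],PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr);
    }
.ve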
1704 
1705 .keywords: PC, ASM, set, local, subdomains, additive Schwarz, block Jacobi
1706 
1707 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1708           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains(), PCSetModifySubmatrices()
1709 @*/
1710 PetscErrorCode  PCASMGetLocalSubmatrices(PC pc,PetscInt *n,Mat *mat[])
1711 {
1712   PC_ASM         *osm;
1713   PetscErrorCode ierr;
1714   PetscBool      match;
1715 
1716   PetscFunctionBegin;
1717   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1718   PetscValidIntPointer(n,2);
1719   if (mat) PetscValidPointer(mat,3);
1720   if (!pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUp() or PCSetUp().");
1721   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1722   if (!match) {
1723     if (n) *n = 0;
1724     if (mat) *mat = NULL;
1725   } else {
1726     osm = (PC_ASM*)pc->data;
1727     if (n) *n = osm->n_local_true;
1728     if (mat) *mat = osm->pmat;
1729   }
1730   PetscFunctionReturn(0);
1731 }
1732 
1733 /*@
1734     PCASMSetDMSubdomains - Indicates whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.
1735 
1736     Logically Collective
1737 
1738     Input Parameters:
1739 +   pc  - the preconditioner
1740 -   flg - boolean indicating whether to use subdomains defined by the DM
1741 
1742     Options Database Key:
1743 .   -pc_asm_dm_subdomains <bool> - use subdomains defined by the DM when possible
1744 
1745     Level: intermediate
1746 
1747     Notes:
1748     PCASMSetTotalSubdomains() and PCASMSetOverlap() take precedence over PCASMSetDMSubdomains(),
1749     so setting either of the first two effectively turns the latter off.
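
    A minimal sketch of enabling DM-defined subdomains (it assumes the KSP ksp has a DM attached, for example a DMDA):
.vb
    PC pc;

    ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
    ierr = PCSetType(pc,PCASM);CHKERRQ(ierr);
    ierr = PCASMSetDMSubdomains(pc,PETSC_TRUE);CHKERRQ(ierr);
.ve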
1750 
1751 .keywords: PC, ASM, DM, set, subdomains, additive Schwarz
1752 
1753 .seealso: PCASMGetDMSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap()
1754           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1755 @*/
1756 PetscErrorCode  PCASMSetDMSubdomains(PC pc,PetscBool flg)
1757 {
1758   PC_ASM         *osm = (PC_ASM*)pc->data;
1759   PetscErrorCode ierr;
1760   PetscBool      match;
1761 
1762   PetscFunctionBegin;
1763   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1764   PetscValidLogicalCollectiveBool(pc,flg,2);
1765   if (pc->setupcalled) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Not for a setup PC.");
1766   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1767   if (match) {
1768     osm->dm_subdomains = flg;
1769   }
1770   PetscFunctionReturn(0);
1771 }
1772 
1773 /*@
1774     PCASMGetDMSubdomains - Returns a flag indicating whether DMCreateDomainDecomposition() is used to define the subdomains, whenever possible.

1775     Not Collective
1776 
1777     Input Parameter:
1778 .   pc  - the preconditioner
1779 
1780     Output Parameter:
1781 .   flg - boolean indicating whether to use subdomains defined by the DM
1782 
1783     Level: intermediate
1784 
1785 .keywords: PC, ASM, DM, set, subdomains, additive Schwarz
1786 
1787 .seealso: PCASMSetDMSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap()
1788           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1789 @*/
1790 PetscErrorCode  PCASMGetDMSubdomains(PC pc,PetscBool* flg)
1791 {
1792   PC_ASM         *osm = (PC_ASM*)pc->data;
1793   PetscErrorCode ierr;
1794   PetscBool      match;
1795 
1796   PetscFunctionBegin;
1797   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1798   PetscValidPointer(flg,2);
1799   ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1800   if (match) {
1801     if (flg) *flg = osm->dm_subdomains;
1802   }
1803   PetscFunctionReturn(0);
1804 }
1805 
1806 /*@
1807      PCASMGetSubMatType - Gets the matrix type used for ASM subsolves, as a string.
1808 
1809    Not Collective
1810 
1811    Input Parameter:
1812 .  pc - the PC
1813 
1814    Output Parameter:
1815 .  sub_mat_type - name of matrix type used for the subdomain solves
1816 
1817    Level: advanced
1818 
1819 .keywords: PC, PCASM, MatType, set
1820 
1821 .seealso: PCASMSetSubMatType(), PCASM, PCSetType(), VecSetType(), MatType, Mat
1822 @*/
1823 PetscErrorCode  PCASMGetSubMatType(PC pc,MatType *sub_mat_type)
{
1824   PetscErrorCode ierr;
1825 
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1826   ierr = PetscTryMethod(pc,"PCASMGetSubMatType_C",(PC,MatType*),(pc,sub_mat_type));CHKERRQ(ierr);
1827   PetscFunctionReturn(0);
1828 }
1829 
1830 /*@
1831      PCASMSetSubMatType - Sets the type of matrix used for ASM subsolves
1832 
1833    Collective on PC
1834 
1835    Input Parameters:
1836 +  pc             - the PC object
1837 -  sub_mat_type   - matrix type
1838 
1839    Options Database Key:
1840 .  -pc_asm_sub_mat_type  <sub_mat_type> - Sets the matrix type used for subsolves, for example, seqaijviennacl. If you specify a base name like aijviennacl, the corresponding sequential type is assumed.
1841 
1842    Notes:
1843    See "${PETSC_DIR}/include/petscmat.h" for available types
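
   A minimal sketch of selecting a ViennaCL matrix type for the subdomain solves (it assumes pc is a PCASM preconditioner and PETSc was configured with ViennaCL support):
.vb
   ierr = PCASMSetSubMatType(pc,MATSEQAIJVIENNACL);CHKERRQ(ierr);
.ve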
1844 
1845   Level: advanced
1846 
1847 .keywords: PC, PCASM, MatType, set
1848 
1849 .seealso: PCASMGetSubMatType(), PCASM, PCSetType(), VecSetType(), MatType, Mat
1850 @*/
1851 PetscErrorCode PCASMSetSubMatType(PC pc,MatType sub_mat_type)
1852 {
1853   PetscErrorCode ierr;
1854 
1855   ierr = PetscTryMethod(pc,"PCASMSetSubMatType_C",(PC,MatType),(pc,sub_mat_type));CHKERRQ(ierr);
1856   PetscFunctionReturn(0);
1857 }
1858