xref: /petsc/src/ksp/pc/impls/asm/asm.c (revision 13e0d08325be81619ffebf1117f9a3b4ad59ea83)
1 #define PETSCKSP_DLL
2 
3 /*
4   This file defines an additive Schwarz preconditioner for any Mat implementation.
5 
6   Note that each processor may have any number of subdomains. But in order to
7   deal easily with the VecScatter(), we treat each processor as if it has the
8   same number of subdomains.
9 
10        n - total number of true subdomains on all processors
11        n_local_true - actual number of subdomains on this processor
12        n_local = maximum over all processors of n_local_true
13 */
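
/*
   A hypothetical example of the above: with 3 processes owning 2, 1, and 3 true
   subdomains respectively, n = 6, n_local_true = 2, 1, and 3 on those processes,
   and n_local = 3 everywhere; processes with fewer true subdomains simply
   participate in the extra VecScatter() calls with zero-length index sets.
*/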
14 #include "private/pcimpl.h"     /*I "petscpc.h" I*/
15 
16 typedef struct {
17   PetscInt   n, n_local, n_local_true;
18   PetscInt   overlap;             /* overlap requested by user */
19   KSP        *ksp;                /* linear solvers for each block */
20   VecScatter *restriction;        /* mapping from global to subregion */
21   VecScatter *localization;       /* mapping from overlapping to non-overlapping subregion */
22   VecScatter *prolongation;       /* mapping from subregion to global */
23   Vec        *x,*y,*y_local;      /* work vectors */
24   IS         *is;                 /* index set that defines each overlapping subdomain */
25   IS         *is_local;           /* index set that defines each non-overlapping subdomain, may be NULL */
26   Mat        *mat,*pmat;          /* mat is not currently used */
27   PCASMType  type;                /* use reduced interpolation, restriction or both */
28   PetscBool  type_set;            /* if user set this value (so won't change it for symmetric problems) */
29   PetscBool  same_local_solves;   /* flag indicating whether all local solvers are the same */
30   PetscBool  sort_indices;        /* flag to sort subdomain indices */
31 } PC_ASM;
32 
33 #undef __FUNCT__
34 #define __FUNCT__ "PCView_ASM"
35 static PetscErrorCode PCView_ASM(PC pc,PetscViewer viewer)
36 {
37   PC_ASM         *osm = (PC_ASM*)pc->data;
38   PetscErrorCode ierr;
39   PetscMPIInt    rank;
40   PetscInt       i,bsz;
41   PetscBool      iascii,isstring;
42   PetscViewer    sviewer;
43 
44 
45   PetscFunctionBegin;
46   ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
47   ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
48   if (iascii) {
49     char overlaps[256] = "user-defined overlap",blocks[256] = "total subdomain blocks not yet set";
50     if (osm->overlap >= 0) {ierr = PetscSNPrintf(overlaps,sizeof overlaps,"amount of overlap = %D",osm->overlap);CHKERRQ(ierr);}
51     if (osm->n > 0) {ierr = PetscSNPrintf(blocks,sizeof blocks,"total subdomain blocks = %D",osm->n);CHKERRQ(ierr);}
52     ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: %s, %s\n",blocks,overlaps);CHKERRQ(ierr);
53     ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: restriction/interpolation type - %s\n",PCASMTypes[osm->type]);CHKERRQ(ierr);
54     ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
55     if (osm->same_local_solves) {
56       if (osm->ksp) {
57         ierr = PetscViewerASCIIPrintf(viewer,"  Local solve is same for all blocks, in the following KSP and PC objects:\n");CHKERRQ(ierr);
58         ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
59         if (!rank) {
60           ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
61           ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);
62           ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
63         }
64         ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
65       }
66     } else {
67       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"  [%d] number of local blocks = %D\n",(int)rank,osm->n_local_true);CHKERRQ(ierr);
68       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
69       ierr = PetscViewerASCIIPrintf(viewer,"  Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr);
70       ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
71       ierr = PetscViewerASCIIPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
72       for (i=0; i<osm->n_local; i++) {
73         ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
74         if (i < osm->n_local_true) {
75           ierr = ISGetLocalSize(osm->is[i],&bsz);CHKERRQ(ierr);
76           ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"[%d] local block number %D, size = %D\n",(int)rank,i,bsz);CHKERRQ(ierr);
77           ierr = KSPView(osm->ksp[i],sviewer);CHKERRQ(ierr);
78           ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
79         }
80         ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
81       }
82       ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
83       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
84     }
85   } else if (isstring) {
86     ierr = PetscViewerStringSPrintf(viewer," blocks=%D, overlap=%D, type=%s",osm->n,osm->overlap,PCASMTypes[osm->type]);CHKERRQ(ierr);
87     ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
88     if (osm->ksp) {ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);}
89     ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
90   } else {
91     SETERRQ1(((PetscObject)pc)->comm,PETSC_ERR_SUP,"Viewer type %s not supported for PCASM",((PetscObject)viewer)->type_name);
92   }
93   PetscFunctionReturn(0);
94 }
95 
96 #undef __FUNCT__
97 #define __FUNCT__ "PCASMPrintSubdomains"
98 static PetscErrorCode PCASMPrintSubdomains(PC pc)
99 {
100   PC_ASM         *osm  = (PC_ASM*)pc->data;
101   const char     *prefix;
102   char           fname[PETSC_MAX_PATH_LEN+1];
103   PetscViewer    viewer;
104   PetscInt       i,j,nidx;
105   const PetscInt *idx;
106   PetscErrorCode ierr;
107 
108   PetscFunctionBegin;
109   ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
110   ierr = PetscOptionsGetString(prefix,"-pc_asm_print_subdomains",fname,PETSC_MAX_PATH_LEN,PETSC_NULL);CHKERRQ(ierr);
111   if (fname[0] == 0) { ierr = PetscStrcpy(fname,"stdout");CHKERRQ(ierr); };
112   ierr = PetscViewerASCIIOpen(((PetscObject)pc)->comm,fname,&viewer);CHKERRQ(ierr);
113   for (i=0;i<osm->n_local_true;i++) {
114     ierr = ISGetLocalSize(osm->is[i],&nidx);CHKERRQ(ierr);
115     ierr = ISGetIndices(osm->is[i],&idx);CHKERRQ(ierr);
116     for (j=0; j<nidx; j++) {
117       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"%D ",idx[j]);CHKERRQ(ierr);
118     }
119     ierr = ISRestoreIndices(osm->is[i],&idx);CHKERRQ(ierr);
120     ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
121     if (osm->is_local) {
122       ierr = ISGetLocalSize(osm->is_local[i],&nidx);CHKERRQ(ierr);
123       ierr = ISGetIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
124       for (j=0; j<nidx; j++) {
125         ierr = PetscViewerASCIISynchronizedPrintf(viewer,"%D ",idx[j]);CHKERRQ(ierr);
126       }
127       ierr = ISRestoreIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
128       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
129     }
130   }
131   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
132   ierr = PetscViewerDestroy(viewer);CHKERRQ(ierr);
133   PetscFunctionReturn(0);
134 }
135 
136 #undef __FUNCT__
137 #define __FUNCT__ "PCSetUp_ASM"
138 static PetscErrorCode PCSetUp_ASM(PC pc)
139 {
140   PC_ASM         *osm  = (PC_ASM*)pc->data;
141   PetscErrorCode ierr;
142   PetscBool      symset,flg;
143   PetscInt       i,m,m_local,firstRow,lastRow;
144   PetscMPIInt    size;
145   MatReuse       scall = MAT_REUSE_MATRIX;
146   IS             isl;
147   KSP            ksp;
148   PC             subpc;
149   const char     *prefix,*pprefix;
150   Vec            vec;
151 
152   PetscFunctionBegin;
153   if (!pc->setupcalled) {
154 
155     if (!osm->type_set) {
156       ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
157       if (symset && flg) { osm->type = PC_ASM_BASIC; }
158     }
159 
160     if (osm->n == PETSC_DECIDE && osm->n_local_true < 1) {
161       /* no subdomains given, use one per processor */
162       osm->n_local = osm->n_local_true = 1;
163       ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
164       osm->n = size;
165     } else if (osm->n == PETSC_DECIDE) {
166       /* determine global number of subdomains */
167       PetscInt inwork[2],outwork[2];
168       inwork[0] = inwork[1] = osm->n_local_true;
169       ierr = MPI_Allreduce(inwork,outwork,1,MPIU_2INT,PetscMaxSum_Op,((PetscObject)pc)->comm);CHKERRQ(ierr);
170       osm->n_local = outwork[0];
171       osm->n       = outwork[1];
172     }
173 
174     if (!osm->is){ /* create the index sets */
175       ierr = PCASMCreateSubdomains(pc->pmat,osm->n_local_true,&osm->is);CHKERRQ(ierr);
176     }
177     if (osm->n_local_true > 1 && !osm->is_local) {
178       ierr = PetscMalloc(osm->n_local_true*sizeof(IS),&osm->is_local);CHKERRQ(ierr);
179       for (i=0; i<osm->n_local_true; i++) {
180         if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
181           ierr = ISDuplicate(osm->is[i],&osm->is_local[i]);CHKERRQ(ierr);
182           ierr = ISCopy(osm->is[i],osm->is_local[i]);CHKERRQ(ierr);
183         } else {
184           ierr = PetscObjectReference((PetscObject)osm->is[i]);CHKERRQ(ierr);
185           osm->is_local[i] = osm->is[i];
186         }
187       }
188     }
189     ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
190     flg  = PETSC_FALSE;
191     ierr = PetscOptionsGetBool(prefix,"-pc_asm_print_subdomains",&flg,PETSC_NULL);CHKERRQ(ierr);
192     if (flg) { ierr = PCASMPrintSubdomains(pc);CHKERRQ(ierr); }
193 
194     if (osm->overlap > 0) {
195       /* Extend the "overlapping" regions by a number of steps */
196       ierr = MatIncreaseOverlap(pc->pmat,osm->n_local_true,osm->is,osm->overlap);CHKERRQ(ierr);
197     }
198     if (osm->sort_indices) {
199       for (i=0; i<osm->n_local_true; i++) {
200         ierr = ISSort(osm->is[i]);CHKERRQ(ierr);
201         if (osm->is_local) {
202           ierr = ISSort(osm->is_local[i]);CHKERRQ(ierr);
203         }
204       }
205     }
206 
207     /* Create the local work vectors and scatter contexts */
208     ierr = MatGetVecs(pc->pmat,&vec,0);CHKERRQ(ierr);
209     ierr = PetscMalloc(osm->n_local*sizeof(VecScatter),&osm->restriction);CHKERRQ(ierr);
210     if (osm->is_local) {ierr = PetscMalloc(osm->n_local*sizeof(VecScatter),&osm->localization);CHKERRQ(ierr);}
211     ierr = PetscMalloc(osm->n_local*sizeof(VecScatter),&osm->prolongation);CHKERRQ(ierr);
212     ierr = PetscMalloc(osm->n_local*sizeof(Vec),&osm->x);CHKERRQ(ierr);
213     ierr = PetscMalloc(osm->n_local*sizeof(Vec),&osm->y);CHKERRQ(ierr);
214     ierr = PetscMalloc(osm->n_local*sizeof(Vec),&osm->y_local);CHKERRQ(ierr);
215     ierr = VecGetOwnershipRange(vec, &firstRow, &lastRow);CHKERRQ(ierr);
216     for (i=0; i<osm->n_local_true; ++i, firstRow += m_local) {
217       ierr = ISGetLocalSize(osm->is[i],&m);CHKERRQ(ierr);
218       ierr = VecCreateSeq(PETSC_COMM_SELF,m,&osm->x[i]);CHKERRQ(ierr);
219       ierr = ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);CHKERRQ(ierr);
220       ierr = VecScatterCreate(vec,osm->is[i],osm->x[i],isl,&osm->restriction[i]);CHKERRQ(ierr);
221       ierr = ISDestroy(isl);CHKERRQ(ierr);
222       ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
223       if (osm->is_local) {
224         ISLocalToGlobalMapping ltog;
225         IS                     isll;
226         const PetscInt         *idx_local;
227         PetscInt               *idx,nout;
228 
229         ierr = ISLocalToGlobalMappingCreateIS(osm->is[i],&ltog);CHKERRQ(ierr);
230         ierr = ISGetLocalSize(osm->is_local[i],&m_local);CHKERRQ(ierr);
231         ierr = ISGetIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr);
232         ierr = PetscMalloc(m_local*sizeof(PetscInt),&idx);CHKERRQ(ierr);
233         ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m_local,idx_local,&nout,idx);CHKERRQ(ierr);
234         ierr = ISLocalToGlobalMappingDestroy(ltog);CHKERRQ(ierr);
235         if (nout != m_local) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"is_local not a subset of is");
236         ierr = ISRestoreIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr);
237         ierr = ISCreateGeneral(PETSC_COMM_SELF,m_local,idx,PETSC_OWN_POINTER,&isll);CHKERRQ(ierr);
238         ierr = ISCreateStride(PETSC_COMM_SELF,m_local,0,1,&isl);CHKERRQ(ierr);
239         ierr = VecCreateSeq(PETSC_COMM_SELF,m_local,&osm->y_local[i]);CHKERRQ(ierr);
240         ierr = VecScatterCreate(osm->y[i],isll,osm->y_local[i],isl,&osm->localization[i]);CHKERRQ(ierr);
241         ierr = ISDestroy(isll);CHKERRQ(ierr);
242 
243         ierr = VecScatterCreate(vec,osm->is_local[i],osm->y_local[i],isl,&osm->prolongation[i]);CHKERRQ(ierr);
244         ierr = ISDestroy(isl);CHKERRQ(ierr);
245       } else {
246         ierr = VecGetLocalSize(vec,&m_local);CHKERRQ(ierr);
247         osm->y_local[i] = osm->y[i];
248         ierr = PetscObjectReference((PetscObject) osm->y[i]);CHKERRQ(ierr);
249         osm->prolongation[i] = osm->restriction[i];
250         ierr = PetscObjectReference((PetscObject) osm->restriction[i]);CHKERRQ(ierr);
251       }
252     }
253     if (firstRow != lastRow) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB, "Specified ASM subdomain sizes were invalid: %D != %D", firstRow, lastRow);
254     for (i=osm->n_local_true; i<osm->n_local; i++) {
255       ierr = VecCreateSeq(PETSC_COMM_SELF,0,&osm->x[i]);CHKERRQ(ierr);
256       ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
257       ierr = VecDuplicate(osm->x[i],&osm->y_local[i]);CHKERRQ(ierr);
258       ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&isl);CHKERRQ(ierr);
259       ierr = VecScatterCreate(vec,isl,osm->x[i],isl,&osm->restriction[i]);CHKERRQ(ierr);
260       if (osm->is_local) {
261         ierr = VecScatterCreate(osm->y[i],isl,osm->y_local[i],isl,&osm->localization[i]);CHKERRQ(ierr);
262         ierr = VecScatterCreate(vec,isl,osm->x[i],isl,&osm->prolongation[i]);CHKERRQ(ierr);
263       } else {
264         osm->prolongation[i] = osm->restriction[i];
265         ierr = PetscObjectReference((PetscObject) osm->restriction[i]);CHKERRQ(ierr);
266       }
267       ierr = ISDestroy(isl);CHKERRQ(ierr);
268     }
269     ierr = VecDestroy(vec);CHKERRQ(ierr);
270 
271     /* Create the local solvers */
272     ierr = PetscMalloc(osm->n_local_true*sizeof(KSP *),&osm->ksp);CHKERRQ(ierr);
273     for (i=0; i<osm->n_local_true; i++) {
274       ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
275       ierr = PetscLogObjectParent(pc,ksp);CHKERRQ(ierr);
276       ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
277       ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
278       ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
279       ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
280       ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
281       ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
282       osm->ksp[i] = ksp;
283     }
284     scall = MAT_INITIAL_MATRIX;
285 
286   } else {
287     /*
288        Destroy the blocks from the previous iteration
289     */
290     if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
291       ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
292       scall = MAT_INITIAL_MATRIX;
293     }
294   }
295 
296   /*
297      Extract out the submatrices
298   */
299   ierr = MatGetSubMatrices(pc->pmat,osm->n_local_true,osm->is,osm->is,scall,&osm->pmat);CHKERRQ(ierr);
300   if (scall == MAT_INITIAL_MATRIX) {
301     ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
302     for (i=0; i<osm->n_local_true; i++) {
303       ierr = PetscLogObjectParent(pc,osm->pmat[i]);CHKERRQ(ierr);
304       ierr = PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);CHKERRQ(ierr);
305     }
306   }
307 
308   /* Return control to the user so that the submatrices can be modified (e.g., to apply
309      different boundary conditions for the submatrices than for the global problem) */
310   ierr = PCModifySubMatrices(pc,osm->n_local_true,osm->is,osm->is,osm->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);
311 
312   /*
313      Loop over subdomains putting them into local ksp
314   */
315   for (i=0; i<osm->n_local_true; i++) {
316     ierr = KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i],pc->flag);CHKERRQ(ierr);
317     if (!pc->setupcalled) {
318       ierr = KSPSetFromOptions(osm->ksp[i]);CHKERRQ(ierr);
319     }
320   }
321 
322   PetscFunctionReturn(0);
323 }
324 
325 #undef __FUNCT__
326 #define __FUNCT__ "PCSetUpOnBlocks_ASM"
327 static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc)
328 {
329   PC_ASM         *osm = (PC_ASM*)pc->data;
330   PetscErrorCode ierr;
331   PetscInt       i;
332 
333   PetscFunctionBegin;
334   for (i=0; i<osm->n_local_true; i++) {
335     ierr = KSPSetUp(osm->ksp[i]);CHKERRQ(ierr);
336   }
337   PetscFunctionReturn(0);
338 }
339 
340 #undef __FUNCT__
341 #define __FUNCT__ "PCApply_ASM"
342 static PetscErrorCode PCApply_ASM(PC pc,Vec x,Vec y)
343 {
344   PC_ASM         *osm = (PC_ASM*)pc->data;
345   PetscErrorCode ierr;
346   PetscInt       i,n_local = osm->n_local,n_local_true = osm->n_local_true;
347   ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;
348 
349   PetscFunctionBegin;
350   /*
351      Support for limiting the restriction or interpolation to only local
352      subdomain values (leaving the other values 0).
353   */
354   if (!(osm->type & PC_ASM_RESTRICT)) {
355     forward = SCATTER_FORWARD_LOCAL;
356     /* have to zero the work RHS since scatter may leave some slots empty */
357     for (i=0; i<n_local_true; i++) {
358       ierr = VecZeroEntries(osm->x[i]);CHKERRQ(ierr);
359     }
360   }
361   if (!(osm->type & PC_ASM_INTERPOLATE)) {
362     reverse = SCATTER_REVERSE_LOCAL;
363   }
364 
365   for (i=0; i<n_local; i++) {
366     ierr = VecScatterBegin(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
367   }
368   ierr = VecZeroEntries(y);CHKERRQ(ierr);
369   /* do the local solves */
370   for (i=0; i<n_local_true; i++) {
371     ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
372     ierr = KSPSolve(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
373     if (osm->localization) {
374       ierr = VecScatterBegin(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
375       ierr = VecScatterEnd(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
376     }
377     ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
378   }
379   /* handle the rest of the scatters that do not have local solves */
380   for (i=n_local_true; i<n_local; i++) {
381     ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
382     ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
383   }
384   for (i=0; i<n_local; i++) {
385     ierr = VecScatterEnd(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
386   }
387   PetscFunctionReturn(0);
388 }
389 
390 #undef __FUNCT__
391 #define __FUNCT__ "PCApplyTranspose_ASM"
392 static PetscErrorCode PCApplyTranspose_ASM(PC pc,Vec x,Vec y)
393 {
394   PC_ASM         *osm = (PC_ASM*)pc->data;
395   PetscErrorCode ierr;
396   PetscInt       i,n_local = osm->n_local,n_local_true = osm->n_local_true;
397   ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;
398 
399   PetscFunctionBegin;
400   /*
401      Support for limiting the restriction or interpolation to only local
402      subdomain values (leaving the other values 0).
403 
404      Note: these are reversed from the PCApply_ASM() because we are applying the
405      transpose of the three terms
406   */
407   if (!(osm->type & PC_ASM_INTERPOLATE)) {
408     forward = SCATTER_FORWARD_LOCAL;
409     /* have to zero the work RHS since scatter may leave some slots empty */
410     for (i=0; i<n_local_true; i++) {
411       ierr = VecZeroEntries(osm->x[i]);CHKERRQ(ierr);
412     }
413   }
414   if (!(osm->type & PC_ASM_RESTRICT)) {
415     reverse = SCATTER_REVERSE_LOCAL;
416   }
417 
418   for (i=0; i<n_local; i++) {
419     ierr = VecScatterBegin(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
420   }
421   ierr = VecZeroEntries(y);CHKERRQ(ierr);
422   /* do the local solves */
423   for (i=0; i<n_local_true; i++) {
424     ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
425     ierr = KSPSolveTranspose(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
426     if (osm->localization) {
427       ierr = VecScatterBegin(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
428       ierr = VecScatterEnd(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
429     }
430     ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
431   }
432   /* handle the rest of the scatters that do not have local solves */
433   for (i=n_local_true; i<n_local; i++) {
434     ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
435     ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
436   }
437   for (i=0; i<n_local; i++) {
438     ierr = VecScatterEnd(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
439   }
440   PetscFunctionReturn(0);
441 }
442 
443 #undef __FUNCT__
444 #define __FUNCT__ "PCReset_ASM"
445 static PetscErrorCode PCReset_ASM(PC pc)
446 {
447   PC_ASM         *osm = (PC_ASM*)pc->data;
448   PetscErrorCode ierr;
449   PetscInt       i;
450 
451   PetscFunctionBegin;
452   if (osm->ksp) {
453     for (i=0; i<osm->n_local_true; i++) {
454       ierr = KSPReset(osm->ksp[i]);CHKERRQ(ierr);
455     }
456   }
457   if (osm->pmat) {
458     if (osm->n_local_true > 0) {
459       ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
460     }
461   }
462   if (osm->restriction) {
463     for (i=0; i<osm->n_local; i++) {
464       ierr = VecScatterDestroy(osm->restriction[i]);CHKERRQ(ierr);
465       if (osm->localization) {ierr = VecScatterDestroy(osm->localization[i]);CHKERRQ(ierr);}
466       ierr = VecScatterDestroy(osm->prolongation[i]);CHKERRQ(ierr);
467       ierr = VecDestroy(osm->x[i]);CHKERRQ(ierr);
468       ierr = VecDestroy(osm->y[i]);CHKERRQ(ierr);
469       ierr = VecDestroy(osm->y_local[i]);CHKERRQ(ierr);
470     }
471     ierr = PetscFree(osm->restriction);CHKERRQ(ierr);
472     if (osm->localization) {ierr = PetscFree(osm->localization);CHKERRQ(ierr);}
473     ierr = PetscFree(osm->prolongation);CHKERRQ(ierr);
474     ierr = PetscFree(osm->x);CHKERRQ(ierr);
475     ierr = PetscFree(osm->y);CHKERRQ(ierr);
476     ierr = PetscFree(osm->y_local);CHKERRQ(ierr);
477   }
478   if (osm->is) {
479     ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
480     osm->is       = 0;
481     osm->is_local = 0;
482   }
483   PetscFunctionReturn(0);
484 }
485 
486 #undef __FUNCT__
487 #define __FUNCT__ "PCDestroy_ASM"
488 static PetscErrorCode PCDestroy_ASM(PC pc)
489 {
490   PC_ASM         *osm = (PC_ASM*)pc->data;
491   PetscErrorCode ierr;
492   PetscInt       i;
493 
494   PetscFunctionBegin;
495   ierr = PCReset_ASM(pc);CHKERRQ(ierr);
496   if (osm->ksp) {
497     for (i=0; i<osm->n_local_true; i++) {
498       ierr = KSPDestroy(osm->ksp[i]);CHKERRQ(ierr);
499     }
500     ierr = PetscFree(osm->ksp);CHKERRQ(ierr);
501   }
502   ierr = PetscFree(pc->data);CHKERRQ(ierr);
503   PetscFunctionReturn(0);
504 }
505 
506 #undef __FUNCT__
507 #define __FUNCT__ "PCSetFromOptions_ASM"
508 static PetscErrorCode PCSetFromOptions_ASM(PC pc)
509 {
510   PC_ASM         *osm = (PC_ASM*)pc->data;
511   PetscErrorCode ierr;
512   PetscInt       blocks,ovl;
513   PetscBool      symset,flg;
514   PCASMType      asmtype;
515 
516   PetscFunctionBegin;
517   /* set the type to symmetric if matrix is symmetric */
518   if (!osm->type_set && pc->pmat) {
519     ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
520     if (symset && flg) { osm->type = PC_ASM_BASIC; }
521   }
522   ierr = PetscOptionsHead("Additive Schwarz options");CHKERRQ(ierr);
523     ierr = PetscOptionsInt("-pc_asm_blocks","Number of subdomains","PCASMSetTotalSubdomains",osm->n,&blocks,&flg);CHKERRQ(ierr);
524     if (flg) {ierr = PCASMSetTotalSubdomains(pc,blocks,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr); }
525     ierr = PetscOptionsInt("-pc_asm_overlap","Number of grid points overlap","PCASMSetOverlap",osm->overlap,&ovl,&flg);CHKERRQ(ierr);
526     if (flg) {ierr = PCASMSetOverlap(pc,ovl);CHKERRQ(ierr); }
527     flg  = PETSC_FALSE;
528     ierr = PetscOptionsEnum("-pc_asm_type","Type of restriction/extension","PCASMSetType",PCASMTypes,(PetscEnum)osm->type,(PetscEnum*)&asmtype,&flg);CHKERRQ(ierr);
529     if (flg) {ierr = PCASMSetType(pc,asmtype);CHKERRQ(ierr); }
530   ierr = PetscOptionsTail();CHKERRQ(ierr);
531   PetscFunctionReturn(0);
532 }
533 
534 /*------------------------------------------------------------------------------------*/
535 
536 EXTERN_C_BEGIN
537 #undef __FUNCT__
538 #define __FUNCT__ "PCASMSetLocalSubdomains_ASM"
539 PetscErrorCode  PCASMSetLocalSubdomains_ASM(PC pc,PetscInt n,IS is[],IS is_local[])
540 {
541   PC_ASM         *osm = (PC_ASM*)pc->data;
542   PetscErrorCode ierr;
543   PetscInt       i;
544 
545   PetscFunctionBegin;
546   if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 1 or more blocks, n = %D",n);
547   if (pc->setupcalled && (n != osm->n_local_true || is)) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"PCASMSetLocalSubdomains() should be called before calling PCSetUp().");
548 
549   if (!pc->setupcalled) {
550     if (is) {
551       for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is[i]);CHKERRQ(ierr);}
552     }
553     if (is_local) {
554       for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is_local[i]);CHKERRQ(ierr);}
555     }
556     if (osm->is) {
557       ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
558     }
559     osm->n_local_true = n;
560     osm->is           = 0;
561     osm->is_local     = 0;
562     if (is) {
563       ierr = PetscMalloc(n*sizeof(IS),&osm->is);CHKERRQ(ierr);
564       for (i=0; i<n; i++) { osm->is[i] = is[i]; }
565       /* Flag indicating that the user has set overlapping subdomains so PCASM should not increase their size. */
566       osm->overlap = -1;
567     }
568     if (is_local) {
569       ierr = PetscMalloc(n*sizeof(IS),&osm->is_local);CHKERRQ(ierr);
570       for (i=0; i<n; i++) { osm->is_local[i] = is_local[i]; }
571     }
572   }
573   PetscFunctionReturn(0);
574 }
575 EXTERN_C_END
576 
577 EXTERN_C_BEGIN
578 #undef __FUNCT__
579 #define __FUNCT__ "PCASMSetTotalSubdomains_ASM"
580 PetscErrorCode  PCASMSetTotalSubdomains_ASM(PC pc,PetscInt N,IS *is,IS *is_local)
581 {
582   PC_ASM         *osm = (PC_ASM*)pc->data;
583   PetscErrorCode ierr;
584   PetscMPIInt    rank,size;
585   PetscInt       n;
586 
587   PetscFunctionBegin;
588   if (N < 1) SETERRQ1(((PetscObject)pc)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Number of total blocks must be > 0, N = %D",N);
589   if (is || is_local) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_SUP,"Use PCASMSetLocalSubdomains() to set specific index sets\n\tthey cannot be set globally yet.");
590 
591   /*
592      Split the subdomains equally among all processors
593   */
594   ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
595   ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
596   n = N/size + ((N % size) > rank);
597   if (!n) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Process %d must have at least one block: total processors %d total blocks %D",(int)rank,(int)size,N);
598   if (pc->setupcalled && n != osm->n_local_true) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"PCASMSetTotalSubdomains() should be called before PCSetUp().");
599   if (!pc->setupcalled) {
600     if (osm->is) {
601       ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
602     }
603     osm->n_local_true = n;
604     osm->is           = 0;
605     osm->is_local     = 0;
606   }
607   PetscFunctionReturn(0);
608 }
609 EXTERN_C_END
610 
611 EXTERN_C_BEGIN
612 #undef __FUNCT__
613 #define __FUNCT__ "PCASMSetOverlap_ASM"
614 PetscErrorCode  PCASMSetOverlap_ASM(PC pc,PetscInt ovl)
615 {
616   PC_ASM *osm = (PC_ASM*)pc->data;
617 
618   PetscFunctionBegin;
619   if (ovl < 0) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
620   if (pc->setupcalled && ovl != osm->overlap) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"PCASMSetOverlap() should be called before PCSetUp().");
621   if (!pc->setupcalled) {
622     osm->overlap = ovl;
623   }
624   PetscFunctionReturn(0);
625 }
626 EXTERN_C_END
627 
628 EXTERN_C_BEGIN
629 #undef __FUNCT__
630 #define __FUNCT__ "PCASMSetType_ASM"
631 PetscErrorCode  PCASMSetType_ASM(PC pc,PCASMType type)
632 {
633   PC_ASM *osm = (PC_ASM*)pc->data;
634 
635   PetscFunctionBegin;
636   osm->type     = type;
637   osm->type_set = PETSC_TRUE;
638   PetscFunctionReturn(0);
639 }
640 EXTERN_C_END
641 
642 EXTERN_C_BEGIN
643 #undef __FUNCT__
644 #define __FUNCT__ "PCASMSetSortIndices_ASM"
645 PetscErrorCode  PCASMSetSortIndices_ASM(PC pc,PetscBool  doSort)
646 {
647   PC_ASM *osm = (PC_ASM*)pc->data;
648 
649   PetscFunctionBegin;
650   osm->sort_indices = doSort;
651   PetscFunctionReturn(0);
652 }
653 EXTERN_C_END
654 
655 EXTERN_C_BEGIN
656 #undef __FUNCT__
657 #define __FUNCT__ "PCASMGetSubKSP_ASM"
658 PetscErrorCode  PCASMGetSubKSP_ASM(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
659 {
660   PC_ASM         *osm = (PC_ASM*)pc->data;
661   PetscErrorCode ierr;
662 
663   PetscFunctionBegin;
664   if (osm->n_local_true < 1) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ORDER,"Need to call PCSetUp() on PC (or KSPSetUp() on the outer KSP object) before calling here");
665 
666   if (n_local) {
667     *n_local = osm->n_local_true;
668   }
669   if (first_local) {
670     ierr = MPI_Scan(&osm->n_local_true,first_local,1,MPIU_INT,MPI_SUM,((PetscObject)pc)->comm);CHKERRQ(ierr);
671     *first_local -= osm->n_local_true;
672   }
673   if (ksp) {
674     /* Assume that local solves are now different; not necessarily
675        true though!  This flag is used only for PCView_ASM() */
676     *ksp                   = osm->ksp;
677     osm->same_local_solves = PETSC_FALSE;
678   }
679   PetscFunctionReturn(0);
680 }
681 EXTERN_C_END
682 
683 
684 #undef __FUNCT__
685 #define __FUNCT__ "PCASMSetLocalSubdomains"
686 /*@C
687     PCASMSetLocalSubdomains - Sets the local subdomains (for this processor
688     only) for the additive Schwarz preconditioner.
689 
690     Collective on PC
691 
692     Input Parameters:
693 +   pc - the preconditioner context
694 .   n - the number of subdomains for this processor (default value = 1)
695 .   is - the index sets that define the subdomains for this processor
696          (or PETSC_NULL for PETSc to determine subdomains)
697 -   is_local - the index sets that define the local part of the subdomains for this processor
698          (or PETSC_NULL to not provide these)
699 
700     Notes:
701     The IS numbering is in the parallel, global numbering of the vector.
702 
703     By default the ASM preconditioner uses 1 block per processor.
704 
705     Use PCASMSetTotalSubdomains() to set the subdomains for all processors.
706 
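    Example usage (a minimal sketch; assumes pc was obtained from a KSP with KSPGetPC()
    and that two index sets is[0] and is[1] for this process were created earlier,
    e.g. with ISCreateGeneral()):
.vb
    PCSetType(pc,PCASM);
    PCASMSetLocalSubdomains(pc,2,is,PETSC_NULL);
.ve
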
707     Level: advanced
708 
709 .keywords: PC, ASM, set, local, subdomains, additive Schwarz
710 
711 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
712           PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
713 @*/
714 PetscErrorCode  PCASMSetLocalSubdomains(PC pc,PetscInt n,IS is[],IS is_local[])
715 {
716   PetscErrorCode ierr;
717 
718   PetscFunctionBegin;
719   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
720   ierr = PetscTryMethod(pc,"PCASMSetLocalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,n,is,is_local));CHKERRQ(ierr);
721   PetscFunctionReturn(0);
722 }
723 
724 #undef __FUNCT__
725 #define __FUNCT__ "PCASMSetTotalSubdomains"
726 /*@C
727     PCASMSetTotalSubdomains - Sets the subdomains for all processors for the
728     additive Schwarz preconditioner.  Either all or no processors in the
729     PC communicator must call this routine, with the same index sets.
730 
731     Collective on PC
732 
733     Input Parameters:
734 +   pc - the preconditioner context
735 .   N - the total number of subdomains for all processors
736 .   is - the index sets that define the subdomains for all processors
737          (or PETSC_NULL for PETSc to determine subdomains)
738 -   is_local - the index sets that define the local part of the subdomains for this processor
739          (or PETSC_NULL to not provide these)
740 
741     Options Database Key:
742     To set the total number of subdomain blocks rather than specify the
743     index sets, use the option
744 .    -pc_asm_blocks <blks> - Sets total blocks
745 
746     Notes:
747     Currently you cannot use this to set the actual subdomains with the argument is.
748 
749     By default the ASM preconditioner uses 1 block per processor.
750 
751     These index sets cannot be destroyed until after completion of the
752     linear solves for which the ASM preconditioner is being used.
753 
754     Use PCASMSetLocalSubdomains() to set local subdomains.
755 
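    Example usage (a sketch; assumes pc was obtained from a KSP with KSPGetPC(); each
    process must end up with at least one of the requested blocks):
.vb
    PCSetType(pc,PCASM);
    PCASMSetTotalSubdomains(pc,8,PETSC_NULL,PETSC_NULL);
.ve
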
756     Level: advanced
757 
758 .keywords: PC, ASM, set, total, global, subdomains, additive Schwarz
759 
760 .seealso: PCASMSetLocalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
761           PCASMCreateSubdomains2D()
762 @*/
763 PetscErrorCode  PCASMSetTotalSubdomains(PC pc,PetscInt N,IS is[],IS is_local[])
764 {
765   PetscErrorCode ierr;
766 
767   PetscFunctionBegin;
768   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
769   ierr = PetscTryMethod(pc,"PCASMSetTotalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,N,is,is_local));CHKERRQ(ierr);
770   PetscFunctionReturn(0);
771 }
772 
773 #undef __FUNCT__
774 #define __FUNCT__ "PCASMSetOverlap"
775 /*@
776     PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
777     additive Schwarz preconditioner.  Either all or no processors in the
778     PC communicator must call this routine.
779 
780     Logically Collective on PC
781 
782     Input Parameters:
783 +   pc  - the preconditioner context
784 -   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)
785 
786     Options Database Key:
787 .   -pc_asm_overlap <ovl> - Sets overlap
788 
789     Notes:
790     By default the ASM preconditioner uses 1 block per processor.  To use
791     multiple blocks per processor, see PCASMSetTotalSubdomains() and
792     PCASMSetLocalSubdomains() (and the option -pc_asm_blocks <blks>).
793 
794     The overlap defaults to 1, so if one desires that no additional
795     overlap be computed beyond what may have been set with a call to
796     PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(), then ovl
797     must be set to 0.  In particular, if one does not explicitly set
798     the subdomains in an application code, then all overlap would be computed
799     internally by PETSc, and using an overlap of 0 would result in an ASM
800     variant that is equivalent to the block Jacobi preconditioner.
801 
802     Note that one can define initial index sets with any overlap via
803     PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(); the routine
804     PCASMSetOverlap() merely allows PETSc to extend that overlap further
805     if desired.
806 
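    Example usage (a sketch; equivalent to the command line option -pc_asm_overlap 2):
.vb
    PCSetType(pc,PCASM);
    PCASMSetOverlap(pc,2);
.ve
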
807     Level: intermediate
808 
809 .keywords: PC, ASM, set, overlap
810 
811 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
812           PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
813 @*/
814 PetscErrorCode  PCASMSetOverlap(PC pc,PetscInt ovl)
815 {
816   PetscErrorCode ierr;
817 
818   PetscFunctionBegin;
819   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
820   PetscValidLogicalCollectiveInt(pc,ovl,2);
821   ierr = PetscTryMethod(pc,"PCASMSetOverlap_C",(PC,PetscInt),(pc,ovl));CHKERRQ(ierr);
822   PetscFunctionReturn(0);
823 }
824 
825 #undef __FUNCT__
826 #define __FUNCT__ "PCASMSetType"
827 /*@
828     PCASMSetType - Sets the type of restriction and interpolation used
829     for local problems in the additive Schwarz method.
830 
831     Logically Collective on PC
832 
833     Input Parameters:
834 +   pc  - the preconditioner context
835 -   type - variant of ASM, one of
836 .vb
837       PC_ASM_BASIC       - full interpolation and restriction
838       PC_ASM_RESTRICT    - full restriction, local processor interpolation
839       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
840       PC_ASM_NONE        - local processor restriction and interpolation
841 .ve
842 
843     Options Database Key:
844 .   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
845 
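    Example usage (a sketch; selects the standard additive Schwarz variant instead of
    the default restricted variant):
.vb
    PCSetType(pc,PCASM);
    PCASMSetType(pc,PC_ASM_BASIC);
.ve
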
846     Level: intermediate
847 
848 .keywords: PC, ASM, set, type
849 
850 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
851           PCASMCreateSubdomains2D()
852 @*/
853 PetscErrorCode  PCASMSetType(PC pc,PCASMType type)
854 {
855   PetscErrorCode ierr;
856 
857   PetscFunctionBegin;
858   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
859   PetscValidLogicalCollectiveEnum(pc,type,2);
860   ierr = PetscTryMethod(pc,"PCASMSetType_C",(PC,PCASMType),(pc,type));CHKERRQ(ierr);
861   PetscFunctionReturn(0);
862 }
863 
864 #undef __FUNCT__
865 #define __FUNCT__ "PCASMSetSortIndices"
866 /*@
867     PCASMSetSortIndices - Determines whether subdomain indices are sorted.
868 
869     Logically Collective on PC
870 
871     Input Parameters:
872 +   pc  - the preconditioner context
873 -   doSort - sort the subdomain indices
874 
875     Level: intermediate
876 
877 .keywords: PC, ASM, set, type
878 
879 .seealso: PCASMSetLocalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
880           PCASMCreateSubdomains2D()
881 @*/
882 PetscErrorCode  PCASMSetSortIndices(PC pc,PetscBool  doSort)
883 {
884   PetscErrorCode ierr;
885 
886   PetscFunctionBegin;
887   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
888   PetscValidLogicalCollectiveBool(pc,doSort,2);
889   ierr = PetscTryMethod(pc,"PCASMSetSortIndices_C",(PC,PetscBool),(pc,doSort));CHKERRQ(ierr);
890   PetscFunctionReturn(0);
891 }
892 
893 #undef __FUNCT__
894 #define __FUNCT__ "PCASMGetSubKSP"
895 /*@C
896    PCASMGetSubKSP - Gets the local KSP contexts for all blocks on
897    this processor.
898 
899    Collective on PC iff first_local is requested
900 
901    Input Parameter:
902 .  pc - the preconditioner context
903 
904    Output Parameters:
905 +  n_local - the number of blocks on this processor, or PETSC_NULL if not needed
906 .  first_local - the global number of the first block on this processor, or PETSC_NULL if not needed;
907                  either all processors must request it or all must pass PETSC_NULL
908 -  ksp - the array of KSP contexts
909 
910    Note:
911    After PCASMGetSubKSP() the array of KSP contexts is not to be freed
912 
913    Currently for some matrix implementations only 1 block per processor
914    is supported.
915 
916    You must call KSPSetUp() before calling PCASMGetSubKSP().
917 
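   Example usage (a sketch; ksp is assumed to be the outer KSP and error checking is
   omitted; after the outer solver is set up, ILU(1) is selected on every local block):
.vb
   PetscInt n_local,first,i;
   KSP      *subksp;
   PC       pc,subpc;

   KSPSetUp(ksp);
   KSPGetPC(ksp,&pc);
   PCASMGetSubKSP(pc,&n_local,&first,&subksp);
   for (i=0; i<n_local; i++) {
     KSPGetPC(subksp[i],&subpc);
     PCSetType(subpc,PCILU);
     PCFactorSetLevels(subpc,1);
   }
.ve
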
918    Level: advanced
919 
920 .keywords: PC, ASM, additive Schwarz, get, sub, KSP, context
921 
922 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMSetOverlap(),
923           PCASMCreateSubdomains2D(),
924 @*/
925 PetscErrorCode  PCASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
926 {
927   PetscErrorCode ierr;
928 
929   PetscFunctionBegin;
930   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
931   ierr = PetscUseMethod(pc,"PCASMGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));CHKERRQ(ierr);
932   PetscFunctionReturn(0);
933 }
934 
935 /* -------------------------------------------------------------------------------------*/
936 /*MC
937    PCASM - Use the (restricted) additive Schwarz method, where each block is (approximately) solved with
938            its own KSP object.
939 
940    Options Database Keys:
942 +  -pc_asm_blocks <blks> - Sets total blocks
943 .  -pc_asm_overlap <ovl> - Sets overlap
944 -  -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type
945 
946      IMPORTANT: If you run with, for example, 3 blocks on 1 processor and 3 blocks on 3 processors you
947       will get different convergence rates due to the default option -pc_asm_type restrict. Use
948       -pc_asm_type basic to get the standard ASM.
949 
950    Notes: Each processor can have one or more blocks, but a block cannot be shared by more
951      than one processor. Defaults to one block per processor.
952 
953      To set options on the solvers for each block append -sub_ to all the KSP and PC
954         options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
955 
956      To set the options on the solvers separately for each block, call PCASMGetSubKSP()
957          and set the options directly on the resulting KSP object (you can access its PC
958          with KSPGetPC())
959 
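     Example usage (a sketch from the command line; ./myprog stands for any application
     that calls KSPSetFromOptions()):
.vb
     mpiexec -n 4 ./myprog -ksp_type gmres -pc_type asm -pc_asm_overlap 2 \
             -sub_ksp_type preonly -sub_pc_type ilu -sub_pc_factor_levels 1
.ve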
960 
961    Level: beginner
962 
963    Concepts: additive Schwarz method
964 
965     References:
966     An additive variant of the Schwarz alternating method for the case of many subregions
967     M Dryja, OB Widlund - Courant Institute, New York University Technical report
968 
969     Domain Decompositions: Parallel Multilevel Methods for Elliptic Partial Differential Equations,
970     Barry Smith, Petter Bjorstad, and William Gropp, Cambridge University Press, ISBN 0-521-49589-X.
971 
972 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
973            PCBJACOBI, PCASMGetSubKSP(), PCASMSetLocalSubdomains(),
974            PCASMSetTotalSubdomains(), PCSetModifySubmatrices(), PCASMSetOverlap(), PCASMSetType()
975 
976 M*/
977 
978 EXTERN_C_BEGIN
979 #undef __FUNCT__
980 #define __FUNCT__ "PCCreate_ASM"
981 PetscErrorCode  PCCreate_ASM(PC pc)
982 {
983   PetscErrorCode ierr;
984   PC_ASM         *osm;
985 
986   PetscFunctionBegin;
987   ierr = PetscNewLog(pc,PC_ASM,&osm);CHKERRQ(ierr);
988   osm->n                 = PETSC_DECIDE;
989   osm->n_local           = 0;
990   osm->n_local_true      = 0;
991   osm->overlap           = 1;
992   osm->ksp               = 0;
993   osm->restriction       = 0;
994   osm->localization      = 0;
995   osm->prolongation      = 0;
996   osm->x                 = 0;
997   osm->y                 = 0;
998   osm->y_local           = 0;
999   osm->is                = 0;
1000   osm->is_local          = 0;
1001   osm->mat               = 0;
1002   osm->pmat              = 0;
1003   osm->type              = PC_ASM_RESTRICT;
1004   osm->same_local_solves = PETSC_TRUE;
1005   osm->sort_indices      = PETSC_TRUE;
1006 
1007   pc->data                   = (void*)osm;
1008   pc->ops->apply             = PCApply_ASM;
1009   pc->ops->applytranspose    = PCApplyTranspose_ASM;
1010   pc->ops->setup             = PCSetUp_ASM;
1011   pc->ops->reset             = PCReset_ASM;
1012   pc->ops->destroy           = PCDestroy_ASM;
1013   pc->ops->setfromoptions    = PCSetFromOptions_ASM;
1014   pc->ops->setuponblocks     = PCSetUpOnBlocks_ASM;
1015   pc->ops->view              = PCView_ASM;
1016   pc->ops->applyrichardson   = 0;
1017 
1018   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetLocalSubdomains_C","PCASMSetLocalSubdomains_ASM",
1019                     PCASMSetLocalSubdomains_ASM);CHKERRQ(ierr);
1020   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetTotalSubdomains_C","PCASMSetTotalSubdomains_ASM",
1021                     PCASMSetTotalSubdomains_ASM);CHKERRQ(ierr);
1022   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetOverlap_C","PCASMSetOverlap_ASM",
1023                     PCASMSetOverlap_ASM);CHKERRQ(ierr);
1024   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetType_C","PCASMSetType_ASM",
1025                     PCASMSetType_ASM);CHKERRQ(ierr);
1026   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetSortIndices_C","PCASMSetSortIndices_ASM",
1027                     PCASMSetSortIndices_ASM);CHKERRQ(ierr);
1028   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMGetSubKSP_C","PCASMGetSubKSP_ASM",
1029                     PCASMGetSubKSP_ASM);CHKERRQ(ierr);
1030   PetscFunctionReturn(0);
1031 }
1032 EXTERN_C_END
1033 
1034 
1035 #undef __FUNCT__
1036 #define __FUNCT__ "PCASMCreateSubdomains"
1037 /*@C
1038    PCASMCreateSubdomains - Creates the index sets for the overlapping Schwarz
1039    preconditioner for any problem on a general grid.
1040 
1041    Collective
1042 
1043    Input Parameters:
1044 +  A - The global matrix operator
1045 -  n - the number of local blocks
1046 
1047    Output Parameters:
1048 .  outis - the array of index sets defining the subdomains
1049 
1050    Level: advanced
1051 
1052    Note: this generates nonoverlapping subdomains; PCASM will generate the overlap
1053     from these when you pass them to PCASMSetLocalSubdomains()
1054 
1055     In the Fortran version you must provide the array outis[], already allocated with length n.
1056 
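   Example usage (a sketch; A is the matrix passed to the KSP and pc is an ASM
   preconditioner; four nonoverlapping blocks are created on this process, handed to
   the PC, and one layer of overlap is then requested):
.vb
   IS *is;
   PCASMCreateSubdomains(A,4,&is);
   PCASMSetLocalSubdomains(pc,4,is,PETSC_NULL);
   PCASMSetOverlap(pc,1);
.ve
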
1057 .keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid
1058 
1059 .seealso: PCASMSetLocalSubdomains(), PCASMDestroySubdomains()
1060 @*/
1061 PetscErrorCode  PCASMCreateSubdomains(Mat A, PetscInt n, IS* outis[])
1062 {
1063   MatPartitioning           mpart;
1064   const char                *prefix;
1065   PetscErrorCode            (*f)(Mat,PetscBool *,MatReuse,Mat*);
1066   PetscMPIInt               size;
1067   PetscInt                  i,j,rstart,rend,bs;
1068   PetscBool                 iscopy = PETSC_FALSE,isbaij = PETSC_FALSE,foundpart = PETSC_FALSE;
1069   Mat                       Ad = PETSC_NULL, adj;
1070   IS                        ispart,isnumb,*is;
1071   PetscErrorCode            ierr;
1072 
1073   PetscFunctionBegin;
1074   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
1075   PetscValidPointer(outis,3);
1076   if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of local blocks must be > 0, n = %D",n);
1077 
1078   /* Get prefix, row distribution, and block size */
1079   ierr = MatGetOptionsPrefix(A,&prefix);CHKERRQ(ierr);
1080   ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
1081   ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
1082   if (rstart/bs*bs != rstart || rend/bs*bs != rend) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"bad row distribution [%D,%D) for matrix block size %D",rstart,rend,bs);
1083 
1084   /* Get diagonal block from matrix if possible */
1085   ierr = MPI_Comm_size(((PetscObject)A)->comm,&size);CHKERRQ(ierr);
1086   ierr = PetscObjectQueryFunction((PetscObject)A,"MatGetDiagonalBlock_C",(void (**)(void))&f);CHKERRQ(ierr);
1087   if (f) {
1088     ierr = (*f)(A,&iscopy,MAT_INITIAL_MATRIX,&Ad);CHKERRQ(ierr);
1089   } else if (size == 1) {
1090     iscopy = PETSC_FALSE; Ad = A;
1091   } else {
1092     iscopy = PETSC_FALSE; Ad = PETSC_NULL;
1093   }
1094   if (Ad) {
1095     ierr = PetscTypeCompare((PetscObject)Ad,MATSEQBAIJ,&isbaij);CHKERRQ(ierr);
1096     if (!isbaij) {ierr = PetscTypeCompare((PetscObject)Ad,MATSEQSBAIJ,&isbaij);CHKERRQ(ierr);}
1097   }
1098   if (Ad && n > 1) {
1099     PetscBool  match,done;
1100     /* Try to setup a good matrix partitioning if available */
1101     ierr = MatPartitioningCreate(PETSC_COMM_SELF,&mpart);CHKERRQ(ierr);
1102     ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
1103     ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr);
1104     ierr = PetscTypeCompare((PetscObject)mpart,MATPARTITIONINGCURRENT,&match);CHKERRQ(ierr);
1105     if (!match) {
1106       ierr = PetscTypeCompare((PetscObject)mpart,MATPARTITIONINGSQUARE,&match);CHKERRQ(ierr);
1107     }
1108     if (!match) { /* assume a "good" partitioner is available */
1109       PetscInt na,*ia,*ja;
1110       ierr = MatGetRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
1111       if (done) {
1112 	/* Build adjacency matrix by hand. Unfortunately a call to
1113 	   MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
1114 	   remove the block-aij structure and we cannot expect
1115 	   MatPartitioning to split vertices as we need */
1116 	PetscInt i,j,*row,len,nnz,cnt,*iia=0,*jja=0;
1117 	nnz = 0;
1118 	for (i=0; i<na; i++) { /* count number of nonzeros */
1119 	  len = ia[i+1] - ia[i];
1120 	  row = ja + ia[i];
1121 	  for (j=0; j<len; j++) {
1122 	    if (row[j] == i) { /* don't count diagonal */
1123 	      len--; break;
1124 	    }
1125 	  }
1126 	  nnz += len;
1127 	}
1128 	ierr = PetscMalloc((na+1)*sizeof(PetscInt),&iia);CHKERRQ(ierr);
1129 	ierr = PetscMalloc((nnz)*sizeof(PetscInt),&jja);CHKERRQ(ierr);
1130 	nnz    = 0;
1131 	iia[0] = 0;
1132 	for (i=0; i<na; i++) { /* fill adjacency */
1133 	  cnt = 0;
1134 	  len = ia[i+1] - ia[i];
1135 	  row = ja + ia[i];
1136 	  for (j=0; j<len; j++) {
1137 	    if (row[j] != i) { /* if not diagonal */
1138 	      jja[nnz+cnt++] = row[j];
1139 	    }
1140 	  }
1141 	  nnz += cnt;
1142 	  iia[i+1] = nnz;
1143 	}
1144 	/* Partitioning of the adjacency matrix */
1145 	ierr = MatCreateMPIAdj(PETSC_COMM_SELF,na,na,iia,jja,PETSC_NULL,&adj);CHKERRQ(ierr);
1146 	ierr = MatPartitioningSetAdjacency(mpart,adj);CHKERRQ(ierr);
1147 	ierr = MatPartitioningSetNParts(mpart,n);CHKERRQ(ierr);
1148 	ierr = MatPartitioningApply(mpart,&ispart);CHKERRQ(ierr);
1149 	ierr = ISPartitioningToNumbering(ispart,&isnumb);CHKERRQ(ierr);
1150 	ierr = MatDestroy(adj);CHKERRQ(ierr);
1151 	foundpart = PETSC_TRUE;
1152       }
1153       ierr = MatRestoreRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
1154     }
1155     ierr = MatPartitioningDestroy(mpart);CHKERRQ(ierr);
1156   }
1157   if (iscopy) {ierr = MatDestroy(Ad);CHKERRQ(ierr);}
1158 
1159   ierr = PetscMalloc(n*sizeof(IS),&is);CHKERRQ(ierr);
1160   *outis = is;
1161 
1162   if (!foundpart) {
1163 
1164     /* Partitioning by contiguous chunks of rows */
1165 
1166     PetscInt mbs   = (rend-rstart)/bs;
1167     PetscInt start = rstart;
1168     for (i=0; i<n; i++) {
1169       PetscInt count = (mbs/n + ((mbs % n) > i)) * bs;
1170       ierr   = ISCreateStride(PETSC_COMM_SELF,count,start,1,&is[i]);CHKERRQ(ierr);
1171       start += count;
1172     }
1173 
1174   } else {
1175 
1176     /* Partitioning by adjacency of diagonal block  */
1177 
1178     const PetscInt *numbering;
1179     PetscInt       *count,nidx,*indices,*newidx,start=0;
1180     /* Get node count in each partition */
1181     ierr = PetscMalloc(n*sizeof(PetscInt),&count);CHKERRQ(ierr);
1182     ierr = ISPartitioningCount(ispart,n,count);CHKERRQ(ierr);
1183     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1184       for (i=0; i<n; i++) count[i] *= bs;
1185     }
1186     /* Build indices from node numbering */
1187     ierr = ISGetLocalSize(isnumb,&nidx);CHKERRQ(ierr);
1188     ierr = PetscMalloc(nidx*sizeof(PetscInt),&indices);CHKERRQ(ierr);
1189     for (i=0; i<nidx; i++) indices[i] = i; /* needs to be initialized */
1190     ierr = ISGetIndices(isnumb,&numbering);CHKERRQ(ierr);
1191     ierr = PetscSortIntWithPermutation(nidx,numbering,indices);CHKERRQ(ierr);
1192     ierr = ISRestoreIndices(isnumb,&numbering);CHKERRQ(ierr);
1193     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1194       ierr = PetscMalloc(nidx*bs*sizeof(PetscInt),&newidx);CHKERRQ(ierr);
1195       for (i=0; i<nidx; i++)
1196 	for (j=0; j<bs; j++)
1197 	  newidx[i*bs+j] = indices[i]*bs + j;
1198       ierr = PetscFree(indices);CHKERRQ(ierr);
1199       nidx   *= bs;
1200       indices = newidx;
1201     }
1202     /* Shift to get global indices */
1203     for (i=0; i<nidx; i++) indices[i] += rstart;
1204 
1205     /* Build the index sets for each block */
1206     for (i=0; i<n; i++) {
1207       ierr   = ISCreateGeneral(PETSC_COMM_SELF,count[i],&indices[start],PETSC_COPY_VALUES,&is[i]);CHKERRQ(ierr);
1208       ierr   = ISSort(is[i]);CHKERRQ(ierr);
1209       start += count[i];
1210     }
1211 
1212     ierr = PetscFree(count);CHKERRQ(ierr);
1213     ierr = PetscFree(indices);CHKERRQ(ierr);
1214     ierr = ISDestroy(isnumb);CHKERRQ(ierr);
1215     ierr = ISDestroy(ispart);CHKERRQ(ierr);
1216 
1217   }
1218 
1219   PetscFunctionReturn(0);
1220 }
1221 
1222 #undef __FUNCT__
1223 #define __FUNCT__ "PCASMDestroySubdomains"
1224 /*@C
1225    PCASMDestroySubdomains - Destroys the index sets created with
1226    PCASMCreateSubdomains(). Should be called after setting subdomains
1227    with PCASMSetLocalSubdomains().
1228 
1229    Collective
1230 
1231    Input Parameters:
1232 +  n - the number of index sets
1233 .  is - the array of index sets
1234 -  is_local - the array of local index sets, can be PETSC_NULL
1235 
1236    Level: advanced
1237 
1238 .keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid
1239 
1240 .seealso: PCASMCreateSubdomains(), PCASMSetLocalSubdomains()
1241 @*/
1242 PetscErrorCode  PCASMDestroySubdomains(PetscInt n, IS is[], IS is_local[])
1243 {
1244   PetscInt       i;
1245   PetscErrorCode ierr;
1246   PetscFunctionBegin;
1247   if (n <= 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"n must be > 0: n = %D",n);
1248   PetscValidPointer(is,2);
1249   for (i=0; i<n; i++) { ierr = ISDestroy(is[i]);CHKERRQ(ierr); }
1250   ierr = PetscFree(is);CHKERRQ(ierr);
1251   if (is_local) {
1252     PetscValidPointer(is_local,3);
1253     for (i=0; i<n; i++) { ierr = ISDestroy(is_local[i]);CHKERRQ(ierr); }
1254     ierr = PetscFree(is_local);CHKERRQ(ierr);
1255   }
1256   PetscFunctionReturn(0);
1257 }
1258 
1259 #undef __FUNCT__
1260 #define __FUNCT__ "PCASMCreateSubdomains2D"
1261 /*@
1262    PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
1263    preconditioner for a two-dimensional problem on a regular grid.
1264 
1265    Not Collective
1266 
1267    Input Parameters:
1268 +  m, n - the number of mesh points in the x and y directions
1269 .  M, N - the number of subdomains in the x and y directions
1270 .  dof - degrees of freedom per node
1271 -  overlap - overlap in mesh lines
1272 
1273    Output Parameters:
1274 +  Nsub - the number of subdomains created
1275 .  is - array of index sets defining overlapping (if overlap > 0) subdomains
1276 -  is_local - array of index sets defining non-overlapping subdomains
1277 
1278    Note:
1279    Presently PCASMCreateSubdomains2D() is valid only for sequential
1280    preconditioners.  More general related routines are
1281    PCASMSetTotalSubdomains() and PCASMSetLocalSubdomains().
1282 
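   Example usage (a sketch; a 64 x 64 grid with 1 degree of freedom per node is split
   into a 4 x 4 array of subdomains with one mesh line of overlap):
.vb
   PetscInt Nsub;
   IS       *is,*is_local;
   PCASMCreateSubdomains2D(64,64,4,4,1,1,&Nsub,&is,&is_local);
   PCASMSetLocalSubdomains(pc,Nsub,is,is_local);
.ve
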
1283    Level: advanced
1284 
1285 .keywords: PC, ASM, additive Schwarz, create, subdomains, 2D, regular grid
1286 
1287 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
1288           PCASMSetOverlap()
1289 @*/
1290 PetscErrorCode  PCASMCreateSubdomains2D(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt dof,PetscInt overlap,PetscInt *Nsub,IS **is,IS **is_local)
1291 {
1292   PetscInt       i,j,height,width,ystart,xstart,yleft,yright,xleft,xright,loc_outer;
1293   PetscErrorCode ierr;
1294   PetscInt       nidx,*idx,loc,ii,jj,count;
1295 
1296   PetscFunctionBegin;
1297   if (dof != 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Support for dof > 1 not yet implemented");
1298 
1299   *Nsub = N*M;
1300   ierr = PetscMalloc((*Nsub)*sizeof(IS*),is);CHKERRQ(ierr);
1301   ierr = PetscMalloc((*Nsub)*sizeof(IS*),is_local);CHKERRQ(ierr);
1302   ystart = 0;
1303   loc_outer = 0;
1304   for (i=0; i<N; i++) {
1305     height = n/N + ((n % N) > i); /* height of subdomain */
1306     if (height < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many N subdomains for mesh dimension n");
1307     yleft  = ystart - overlap; if (yleft < 0) yleft = 0;
1308     yright = ystart + height + overlap; if (yright > n) yright = n;
1309     xstart = 0;
1310     for (j=0; j<M; j++) {
1311       width = m/M + ((m % M) > j); /* width of subdomain */
1312       if (width < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many M subdomains for mesh dimension m");
1313       xleft  = xstart - overlap; if (xleft < 0) xleft = 0;
1314       xright = xstart + width + overlap; if (xright > m) xright = m;
1315       nidx   = (xright - xleft)*(yright - yleft);
1316       ierr = PetscMalloc(nidx*sizeof(PetscInt),&idx);CHKERRQ(ierr);
1317       loc    = 0;
1318       for (ii=yleft; ii<yright; ii++) {
1319         count = m*ii + xleft;
1320         for (jj=xleft; jj<xright; jj++) {
1321           idx[loc++] = count++;
1322         }
1323       }
1324       ierr = ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,PETSC_COPY_VALUES,(*is)+loc_outer);CHKERRQ(ierr);
1325       if (overlap == 0) {
1326         ierr = PetscObjectReference((PetscObject)(*is)[loc_outer]);CHKERRQ(ierr);
1327         (*is_local)[loc_outer] = (*is)[loc_outer];
1328       } else {
1329         for (loc=0,ii=ystart; ii<ystart+height; ii++) {
1330           for (jj=xstart; jj<xstart+width; jj++) {
1331             idx[loc++] = m*ii + jj;
1332           }
1333         }
1334         ierr = ISCreateGeneral(PETSC_COMM_SELF,loc,idx,PETSC_COPY_VALUES,*is_local+loc_outer);CHKERRQ(ierr);
1335       }
1336       ierr = PetscFree(idx);CHKERRQ(ierr);
1337       xstart += width;
1338       loc_outer++;
1339     }
1340     ystart += height;
1341   }
1342   for (i=0; i<*Nsub; i++) { ierr = ISSort((*is)[i]);CHKERRQ(ierr); }
1343   PetscFunctionReturn(0);
1344 }
1345 
1346 #undef __FUNCT__
1347 #define __FUNCT__ "PCASMGetLocalSubdomains"
1348 /*@C
1349     PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
1350     only) for the additive Schwarz preconditioner.
1351 
1352     Not Collective
1353 
1354     Input Parameter:
1355 .   pc - the preconditioner context
1356 
1357     Output Parameters:
1358 +   n - the number of subdomains for this processor (default value = 1)
1359 .   is - the index sets that define the subdomains for this processor
1360 -   is_local - the index sets that define the local part of the subdomains for this processor (can be PETSC_NULL)
1361 
1362 
1363     Notes:
1364     The IS numbering is in the parallel, global numbering of the vector.
1365 
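    Example usage (a sketch; retrieves whatever subdomains were set or generated):
.vb
    PetscInt n;
    IS       *is,*is_local;
    PCASMGetLocalSubdomains(pc,&n,&is,&is_local);
.ve
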
1366     Level: advanced
1367 
1368 .keywords: PC, ASM, set, local, subdomains, additive Schwarz
1369 
1370 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1371           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubmatrices()
1372 @*/
1373 PetscErrorCode  PCASMGetLocalSubdomains(PC pc,PetscInt *n,IS *is[],IS *is_local[])
1374 {
1375   PC_ASM         *osm;
1376   PetscErrorCode ierr;
1377   PetscBool      match;
1378 
1379   PetscFunctionBegin;
1380   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1381   PetscValidIntPointer(n,2);
1382   if (is) PetscValidPointer(is,3);
1383   ierr = PetscTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1384   if (!match) {
1385     if (n)  *n  = 0;
1386     if (is) *is = PETSC_NULL;
1387   } else {
1388     osm = (PC_ASM*)pc->data;
1389     if (n)  *n  = osm->n_local_true;
1390     if (is) *is = osm->is;
1391     if (is_local) *is_local = osm->is_local;
1392   }
1393   PetscFunctionReturn(0);
1394 }
1395 
1396 #undef __FUNCT__
1397 #define __FUNCT__ "PCASMGetLocalSubmatrices"
1398 /*@C
1399     PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1400     only) for the additive Schwarz preconditioner.
1401 
1402     Not Collective
1403 
1404     Input Parameter:
1405 .   pc - the preconditioner context
1406 
1407     Output Parameters:
1408 +   n - the number of matrices for this processor (default value = 1)
1409 -   mat - the matrices
1410 
1411 
1412     Level: advanced
1413 
1414 .keywords: PC, ASM, set, local, subdomains, additive Schwarz, block Jacobi
1415 
1416 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1417           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1418 @*/
1419 PetscErrorCode  PCASMGetLocalSubmatrices(PC pc,PetscInt *n,Mat *mat[])
1420 {
1421   PC_ASM         *osm;
1422   PetscErrorCode ierr;
1423   PetscBool      match;
1424 
1425   PetscFunctionBegin;
1426   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1427   PetscValidIntPointer(n,2);
1428   if (mat) PetscValidPointer(mat,3);
1429   if (!pc->setupcalled) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUp() or PCSetUp().");
1430   ierr = PetscTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
1431   if (!match) {
1432     if (n)   *n   = 0;
1433     if (mat) *mat = PETSC_NULL;
1434   } else {
1435     osm = (PC_ASM*)pc->data;
1436     if (n)   *n   = osm->n_local_true;
1437     if (mat) *mat = osm->pmat;
1438   }
1439   PetscFunctionReturn(0);
1440 }
1441