#define PETSCKSP_DLL

/*
  This file defines an additive Schwarz preconditioner for any Mat implementation.

  Note that each processor may have any number of subdomains. But in order to
  deal easily with the VecScatter(), we treat each processor as if it has the
  same number of subdomains.

       n - total number of true subdomains on all processors
       n_local_true - actual number of subdomains on this processor
       n_local = maximum over all processors of n_local_true
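
       For example, if three processors own 2, 1, and 3 true subdomains,
       then n = 6, n_local_true is 2, 1, and 3 on the respective processors,
       and n_local = 3; processors with fewer than n_local subdomains are
       padded with zero-length work vectors in PCSetUp_ASM().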
*/
#include "private/pcimpl.h"     /*I "petscpc.h" I*/

typedef struct {
  PetscInt   n, n_local, n_local_true;
  PetscInt   overlap;             /* overlap requested by user */
  KSP        *ksp;                /* linear solvers for each block */
  VecScatter *restriction;        /* mapping from global to subregion */
  VecScatter *localization;       /* mapping from overlapping to non-overlapping subregion */
  VecScatter *prolongation;       /* mapping from subregion to global */
  Vec        *x,*y,*y_local;      /* work vectors */
  IS         *is;                 /* index set that defines each overlapping subdomain */
  IS         *is_local;           /* index set that defines each non-overlapping subdomain, may be NULL */
  Mat        *mat,*pmat;          /* mat is not currently used */
  PCASMType  type;                /* use reduced interpolation, restriction or both */
  PetscTruth type_set;            /* if the user set this value (so it won't be changed for symmetric problems) */
  PetscTruth same_local_solves;   /* flag indicating whether all local solvers are the same */
  PetscTruth sort_indices;        /* flag to sort subdomain indices */
} PC_ASM;

#undef __FUNCT__
#define __FUNCT__ "PCView_ASM"
static PetscErrorCode PCView_ASM(PC pc,PetscViewer viewer)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  PetscInt       i,bsz;
  PetscTruth     iascii,isstring;
  PetscViewer    sviewer;

  PetscFunctionBegin;
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_STRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    if (osm->n > 0) {
      ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: total subdomain blocks = %D, amount of overlap = %D\n",osm->n,osm->overlap);CHKERRQ(ierr);
    } else {
      ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: total subdomain blocks not yet set, amount of overlap = %D\n",osm->overlap);CHKERRQ(ierr);
    }
    ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: restriction/interpolation type - %s\n",PCASMTypes[osm->type]);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
    if (osm->same_local_solves) {
      if (osm->ksp) {
        ierr = PetscViewerASCIIPrintf(viewer,"  Local solve is same for all blocks, in the following KSP and PC objects:\n");CHKERRQ(ierr);
        ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
        if (!rank) {
          ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
          ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);
          ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
        }
        ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
      }
    } else {
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"  [%d] number of local blocks = %D\n",(int)rank,osm->n_local_true);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"  Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
      for (i=0; i<osm->n_local; i++) {
        ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
        if (i < osm->n_local_true) {
          ierr = ISGetLocalSize(osm->is[i],&bsz);CHKERRQ(ierr);
          ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"[%d] local block number %D, size = %D\n",(int)rank,i,bsz);CHKERRQ(ierr);
          ierr = KSPView(osm->ksp[i],sviewer);CHKERRQ(ierr);
          ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
        }
        ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
      }
      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    ierr = PetscViewerStringSPrintf(viewer," blocks=%D, overlap=%D, type=%s",osm->n,osm->overlap,PCASMTypes[osm->type]);CHKERRQ(ierr);
    ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
    if (osm->ksp) {ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);}
    ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
  } else {
    SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported for PCASM",((PetscObject)viewer)->type_name);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMPrintSubdomains"
static PetscErrorCode PCASMPrintSubdomains(PC pc)
{
  PC_ASM         *osm  = (PC_ASM*)pc->data;
  const char     *prefix;
  char           fname[PETSC_MAX_PATH_LEN+1];
  PetscViewer    viewer;
  PetscInt       i,j,nidx;
  const PetscInt *idx;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
  ierr = PetscOptionsGetString(prefix,"-pc_asm_print_subdomains",fname,PETSC_MAX_PATH_LEN,PETSC_NULL);CHKERRQ(ierr);
  if (fname[0] == 0) { ierr = PetscStrcpy(fname,"stdout");CHKERRQ(ierr); }
  ierr = PetscViewerASCIIOpen(((PetscObject)pc)->comm,fname,&viewer);CHKERRQ(ierr);
  for (i=0; i<osm->n_local_true; i++) {
    ierr = ISGetLocalSize(osm->is[i],&nidx);CHKERRQ(ierr);
    ierr = ISGetIndices(osm->is[i],&idx);CHKERRQ(ierr);
    for (j=0; j<nidx; j++) {
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"%D ",idx[j]);CHKERRQ(ierr);
    }
    ierr = ISRestoreIndices(osm->is[i],&idx);CHKERRQ(ierr);
    ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
    if (osm->is_local) {
      ierr = ISGetLocalSize(osm->is_local[i],&nidx);CHKERRQ(ierr);
      ierr = ISGetIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
      for (j=0; j<nidx; j++) {
        ierr = PetscViewerASCIISynchronizedPrintf(viewer,"%D ",idx[j]);CHKERRQ(ierr);
      }
      ierr = ISRestoreIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
    }
  }
  ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
  ierr = PetscViewerDestroy(viewer);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetUp_ASM"
static PetscErrorCode PCSetUp_ASM(PC pc)
{
  PC_ASM         *osm  = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscTruth     symset,flg;
  PetscInt       i,m,m_local,firstRow,lastRow;
  PetscMPIInt    size;
  MatReuse       scall = MAT_REUSE_MATRIX;
  IS             isl;
  KSP            ksp;
  PC             subpc;
  const char     *prefix,*pprefix;
  Vec            vec;

  PetscFunctionBegin;
  if (!pc->setupcalled) {

    if (!osm->type_set) {
      ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
      if (symset && flg) { osm->type = PC_ASM_BASIC; }
    }

    if (osm->n == PETSC_DECIDE && osm->n_local_true < 1) {
      /* no subdomains given, use one per processor */
      osm->n_local = osm->n_local_true = 1;
      ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
      osm->n = size;
    } else if (osm->n == PETSC_DECIDE) {
      /* determine global number of subdomains */
      PetscInt inwork[2],outwork[2];
      inwork[0] = inwork[1] = osm->n_local_true;
      ierr = MPI_Allreduce(inwork,outwork,1,MPIU_2INT,PetscMaxSum_Op,((PetscObject)pc)->comm);CHKERRQ(ierr);
      osm->n_local = outwork[0];
      osm->n       = outwork[1];
    }

    if (!osm->is) { /* create the index sets */
      ierr = PCASMCreateSubdomains(pc->pmat,osm->n_local_true,&osm->is);CHKERRQ(ierr);
    }
    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    flg  = PETSC_FALSE;
    ierr = PetscOptionsGetTruth(prefix,"-pc_asm_print_subdomains",&flg,PETSC_NULL);CHKERRQ(ierr);
    if (flg) { ierr = PCASMPrintSubdomains(pc);CHKERRQ(ierr); }

    /* Extend the "overlapping" regions by a number of steps */
    ierr = MatIncreaseOverlap(pc->pmat,osm->n_local_true,osm->is,osm->overlap);CHKERRQ(ierr);
    if (osm->sort_indices) {
      for (i=0; i<osm->n_local_true; i++) {
        ierr = ISSort(osm->is[i]);CHKERRQ(ierr);
        if (osm->is_local) {
          ierr = ISSort(osm->is_local[i]);CHKERRQ(ierr);
        }
      }
    }

    /* Create the local work vectors and scatter contexts */
    ierr = MatGetVecs(pc->pmat,&vec,0);CHKERRQ(ierr);
    ierr = PetscMalloc(osm->n_local*sizeof(VecScatter *),&osm->restriction);CHKERRQ(ierr);
    if (osm->is_local) {ierr = PetscMalloc(osm->n_local*sizeof(VecScatter *),&osm->localization);CHKERRQ(ierr);}
    ierr = PetscMalloc(osm->n_local*sizeof(VecScatter *),&osm->prolongation);CHKERRQ(ierr);
    ierr = PetscMalloc(osm->n_local*sizeof(Vec *),&osm->x);CHKERRQ(ierr);
    ierr = PetscMalloc(osm->n_local*sizeof(Vec *),&osm->y);CHKERRQ(ierr);
    ierr = PetscMalloc(osm->n_local*sizeof(Vec *),&osm->y_local);CHKERRQ(ierr);
    ierr = VecGetOwnershipRange(vec,&firstRow,&lastRow);CHKERRQ(ierr);
    for (i=0; i<osm->n_local_true; ++i, firstRow += m_local) {
      ierr = ISGetLocalSize(osm->is[i],&m);CHKERRQ(ierr);
      ierr = VecCreateSeq(PETSC_COMM_SELF,m,&osm->x[i]);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,osm->is[i],osm->x[i],isl,&osm->restriction[i]);CHKERRQ(ierr);
      ierr = ISDestroy(isl);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
      if (osm->is_local) {
        IS             isShift;
        const PetscInt *idx_local;
        PetscInt       *idx, j;

        ierr = ISGetLocalSize(osm->is_local[i],&m_local);CHKERRQ(ierr);
        ierr = ISGetIndices(osm->is_local[i],&idx_local);CHKERRQ(ierr);
        ierr = PetscMalloc(m*sizeof(PetscInt),&idx);CHKERRQ(ierr);
        for (j = 0; j < m_local; ++j) {
          idx[j] = idx_local[j] - firstRow;
        }
        ierr = ISRestoreIndices(osm->is_local[i],&idx_local);CHKERRQ(ierr);
        ierr = ISCreateGeneral(PETSC_COMM_SELF,m_local,idx,&isShift);CHKERRQ(ierr);
        ierr = PetscFree(idx);CHKERRQ(ierr);
        ierr = ISCreateStride(PETSC_COMM_SELF,m_local,0,1,&isl);CHKERRQ(ierr);
        ierr = VecCreateSeq(PETSC_COMM_SELF,m_local,&osm->y_local[i]);CHKERRQ(ierr);
        ierr = VecScatterCreate(osm->y[i],isShift,osm->y_local[i],isl,&osm->localization[i]);CHKERRQ(ierr);
        ierr = ISDestroy(isShift);CHKERRQ(ierr);

        ierr = VecScatterCreate(vec,osm->is_local[i],osm->y_local[i],isl,&osm->prolongation[i]);CHKERRQ(ierr);
        ierr = ISDestroy(isl);CHKERRQ(ierr);
      } else {
        ierr = VecGetLocalSize(vec,&m_local);CHKERRQ(ierr);
        osm->y_local[i] = osm->y[i];
        ierr = PetscObjectReference((PetscObject) osm->y[i]);CHKERRQ(ierr);
        osm->prolongation[i] = osm->restriction[i];
        ierr = PetscObjectReference((PetscObject) osm->restriction[i]);CHKERRQ(ierr);
      }
    }
    if (firstRow != lastRow) SETERRQ2(PETSC_ERR_PLIB,"Specified ASM subdomain sizes were invalid: %D != %D",firstRow,lastRow);
    for (i=osm->n_local_true; i<osm->n_local; i++) {
      ierr = VecCreateSeq(PETSC_COMM_SELF,0,&osm->x[i]);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y_local[i]);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&isl);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,isl,osm->x[i],isl,&osm->restriction[i]);CHKERRQ(ierr);
      if (osm->is_local) {
        ierr = VecScatterCreate(osm->y[i],isl,osm->y_local[i],isl,&osm->localization[i]);CHKERRQ(ierr);
        ierr = VecScatterCreate(vec,isl,osm->x[i],isl,&osm->prolongation[i]);CHKERRQ(ierr);
      } else {
        osm->prolongation[i] = osm->restriction[i];
        ierr = PetscObjectReference((PetscObject) osm->restriction[i]);CHKERRQ(ierr);
      }
      ierr = ISDestroy(isl);CHKERRQ(ierr);
    }
    ierr = VecDestroy(vec);CHKERRQ(ierr);

    /* Create the local solvers */
    ierr = PetscMalloc(osm->n_local_true*sizeof(KSP *),&osm->ksp);CHKERRQ(ierr);
    for (i=0; i<osm->n_local_true; i++) {
      ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
      ierr = PetscLogObjectParent(pc,ksp);CHKERRQ(ierr);
      ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
      ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
      ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
      ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
      ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
      ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
      osm->ksp[i] = ksp;
    }
    scall = MAT_INITIAL_MATRIX;

  } else {
    /*
       Destroy the blocks from the previous iteration
    */
    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
      scall = MAT_INITIAL_MATRIX;
    }
  }

  /*
     Extract out the submatrices
  */
  ierr = MatGetSubMatrices(pc->pmat,osm->n_local_true,osm->is,osm->is,scall,&osm->pmat);CHKERRQ(ierr);
  if (scall == MAT_INITIAL_MATRIX) {
    ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
    for (i=0; i<osm->n_local_true; i++) {
      ierr = PetscLogObjectParent(pc,osm->pmat[i]);CHKERRQ(ierr);
      ierr = PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);CHKERRQ(ierr);
    }
  }

  /* Return control to the user so that the submatrices can be modified (e.g., to apply
     different boundary conditions for the submatrices than for the global problem) */
  ierr = PCModifySubMatrices(pc,osm->n_local_true,osm->is,osm->is,osm->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);

  /*
     Loop over the subdomains, setting the submatrices on the local solvers
  */
  for (i=0; i<osm->n_local_true; i++) {
    ierr = KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i],pc->flag);CHKERRQ(ierr);
    if (!pc->setupcalled) {
      ierr = KSPSetFromOptions(osm->ksp[i]);CHKERRQ(ierr);
    }
  }

  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetUpOnBlocks_ASM"
static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  for (i=0; i<osm->n_local_true; i++) {
    ierr = KSPSetUp(osm->ksp[i]);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApply_ASM"
static PetscErrorCode PCApply_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local = osm->n_local,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).
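
     The preconditioner application computed below is

         y = sum_i R_i^T A_i^{-1} R_i x

     where R_i restricts to the (overlapping) subdomain i and A_i is the
     corresponding submatrix; PC_ASM_RESTRICT uses only the local part of
     R_i^T, and PC_ASM_INTERPOLATE only the local part of R_i.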
  */
  if (!(osm->type & PC_ASM_RESTRICT)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local_true; i++) {
      ierr = VecSet(osm->x[i],0.0);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    reverse = SCATTER_REVERSE_LOCAL;
  }

  for (i=0; i<n_local; i++) {
    ierr = VecScatterBegin(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
  }
  ierr = VecSet(y,0.0);CHKERRQ(ierr);
  /* do the local solves */
  for (i=0; i<n_local_true; i++) {
    ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = KSPSolve(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
    if (osm->localization) {
      ierr = VecScatterBegin(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    }
    ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  /* handle the rest of the scatters that do not have local solves */
  for (i=n_local_true; i<n_local; i++) {
    ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  for (i=0; i<n_local; i++) {
    ierr = VecScatterEnd(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApplyTranspose_ASM"
static PetscErrorCode PCApplyTranspose_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local = osm->n_local,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).

     Note: these are reversed from PCApply_ASM() because we are applying the
     transpose of each of the three terms
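     (so the action computed is y = sum_i R_i^T A_i^{-T} R_i x)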
  */
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local_true; i++) {
      ierr = VecSet(osm->x[i],0.0);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_RESTRICT)) {
    reverse = SCATTER_REVERSE_LOCAL;
  }

  for (i=0; i<n_local; i++) {
    ierr = VecScatterBegin(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
  }
  ierr = VecSet(y,0.0);CHKERRQ(ierr);
  /* do the local solves */
  for (i=0; i<n_local_true; i++) {
    ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = KSPSolveTranspose(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
    if (osm->localization) {
      ierr = VecScatterBegin(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    }
    ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  /* handle the rest of the scatters that do not have local solves */
  for (i=n_local_true; i<n_local; i++) {
    ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  for (i=0; i<n_local; i++) {
    ierr = VecScatterEnd(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCDestroy_ASM"
static PetscErrorCode PCDestroy_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  if (osm->ksp) {
    for (i=0; i<osm->n_local_true; i++) {
      ierr = KSPDestroy(osm->ksp[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree(osm->ksp);CHKERRQ(ierr);
  }
  if (osm->pmat) {
    if (osm->n_local_true > 0) {
      ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
    }
  }
  if (osm->restriction) {
    for (i=0; i<osm->n_local; i++) {
      ierr = VecScatterDestroy(osm->restriction[i]);CHKERRQ(ierr);
      if (osm->localization) {ierr = VecScatterDestroy(osm->localization[i]);CHKERRQ(ierr);}
      ierr = VecScatterDestroy(osm->prolongation[i]);CHKERRQ(ierr);
      ierr = VecDestroy(osm->x[i]);CHKERRQ(ierr);
      ierr = VecDestroy(osm->y[i]);CHKERRQ(ierr);
      ierr = VecDestroy(osm->y_local[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree(osm->restriction);CHKERRQ(ierr);
    if (osm->localization) {ierr = PetscFree(osm->localization);CHKERRQ(ierr);}
    ierr = PetscFree(osm->prolongation);CHKERRQ(ierr);
    ierr = PetscFree(osm->x);CHKERRQ(ierr);
    ierr = PetscFree(osm->y);CHKERRQ(ierr);
    ierr = PetscFree(osm->y_local);CHKERRQ(ierr);
  }
  if (osm->is) {
    ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
  }
  ierr = PetscFree(osm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetFromOptions_ASM"
static PetscErrorCode PCSetFromOptions_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       blocks,ovl;
  PetscTruth     symset,flg;
  PCASMType      asmtype;

  PetscFunctionBegin;
  /* set the type to symmetric if matrix is symmetric */
  if (!osm->type_set && pc->pmat) {
    ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
    if (symset && flg) { osm->type = PC_ASM_BASIC; }
  }
  ierr = PetscOptionsHead("Additive Schwarz options");CHKERRQ(ierr);
    ierr = PetscOptionsInt("-pc_asm_blocks","Number of subdomains","PCASMSetTotalSubdomains",osm->n,&blocks,&flg);CHKERRQ(ierr);
    if (flg) {ierr = PCASMSetTotalSubdomains(pc,blocks,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr);}
    ierr = PetscOptionsInt("-pc_asm_overlap","Number of grid points overlap","PCASMSetOverlap",osm->overlap,&ovl,&flg);CHKERRQ(ierr);
    if (flg) {ierr = PCASMSetOverlap(pc,ovl);CHKERRQ(ierr);}
    flg  = PETSC_FALSE;
    ierr = PetscOptionsEnum("-pc_asm_type","Type of restriction/extension","PCASMSetType",PCASMTypes,(PetscEnum)osm->type,(PetscEnum*)&asmtype,&flg);CHKERRQ(ierr);
    if (flg) {ierr = PCASMSetType(pc,asmtype);CHKERRQ(ierr);}
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*------------------------------------------------------------------------------------*/

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCASMSetLocalSubdomains_ASM"
PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetLocalSubdomains_ASM(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  if (n < 1) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 1 or more blocks, n = %D",n);
  if (pc->setupcalled && (n != osm->n_local_true || is)) {
    SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"PCASMSetLocalSubdomains() should be called before calling PCSetUp().");
  }
  if (!pc->setupcalled) {
    if (is) {
      for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is[i]);CHKERRQ(ierr);}
    }
    if (is_local) {
      for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is_local[i]);CHKERRQ(ierr);}
    }
    if (osm->is) {
      ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
    }
    osm->n_local_true = n;
    osm->is           = 0;
    osm->is_local     = 0;
    if (is) {
      ierr = PetscMalloc(n*sizeof(IS *),&osm->is);CHKERRQ(ierr);
      for (i=0; i<n; i++) { osm->is[i] = is[i]; }
    }
    if (is_local) {
      ierr = PetscMalloc(n*sizeof(IS *),&osm->is_local);CHKERRQ(ierr);
      for (i=0; i<n; i++) { osm->is_local[i] = is_local[i]; }
    }
  }
  PetscFunctionReturn(0);
}
EXTERN_C_END

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCASMSetTotalSubdomains_ASM"
PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetTotalSubdomains_ASM(PC pc,PetscInt N,IS *is,IS *is_local)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscMPIInt    rank,size;
  PetscInt       n;

  PetscFunctionBegin;
  if (N < 1) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Number of total blocks must be > 0, N = %D",N);
  if (is || is_local) SETERRQ(PETSC_ERR_SUP,"Use PCASMSetLocalSubdomains() to set specific index sets\n\tthey cannot be set globally yet.");

  /*
     Split the subdomains equally among all processors
  */
  ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
  n = N/size + ((N % size) > rank);
  if (!n) SETERRQ3(PETSC_ERR_ARG_OUTOFRANGE,"Process %d must have at least one block: total processors %d total blocks %D",(int)rank,(int)size,N);
  if (pc->setupcalled && n != osm->n_local_true) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"PCASMSetTotalSubdomains() should be called before PCSetUp().");
  if (!pc->setupcalled) {
    if (osm->is) {
      ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
    }
    osm->n_local_true = n;
    osm->is           = 0;
    osm->is_local     = 0;
  }
  PetscFunctionReturn(0);
}
EXTERN_C_END

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCASMSetOverlap_ASM"
PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetOverlap_ASM(PC pc,PetscInt ovl)
{
  PC_ASM *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  if (ovl < 0) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
  if (pc->setupcalled && ovl != osm->overlap) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"PCASMSetOverlap() should be called before PCSetUp().");
  if (!pc->setupcalled) {
    osm->overlap = ovl;
  }
  PetscFunctionReturn(0);
}
EXTERN_C_END

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCASMSetType_ASM"
PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetType_ASM(PC pc,PCASMType type)
{
  PC_ASM *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  osm->type     = type;
  osm->type_set = PETSC_TRUE;
  PetscFunctionReturn(0);
}
EXTERN_C_END

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCASMSetSortIndices_ASM"
PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetSortIndices_ASM(PC pc,PetscTruth doSort)
{
  PC_ASM *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  osm->sort_indices = doSort;
  PetscFunctionReturn(0);
}
EXTERN_C_END

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCASMGetSubKSP_ASM"
PetscErrorCode PETSCKSP_DLLEXPORT PCASMGetSubKSP_ASM(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (osm->n_local_true < 1) {
    SETERRQ(PETSC_ERR_ORDER,"Need to call PCSetUp() on PC (or KSPSetUp() on the outer KSP object) before calling here");
  }

  if (n_local) {
    *n_local = osm->n_local_true;
  }
  if (first_local) {
    ierr = MPI_Scan(&osm->n_local_true,first_local,1,MPIU_INT,MPI_SUM,((PetscObject)pc)->comm);CHKERRQ(ierr);
    *first_local -= osm->n_local_true;
  }
  if (ksp) {
    /* Assume that the local solves are now different; not necessarily
       true, though!  This flag is used only by PCView_ASM() */
    *ksp                   = osm->ksp;
    osm->same_local_solves = PETSC_FALSE;
  }
  PetscFunctionReturn(0);
}
EXTERN_C_END


#undef __FUNCT__
#define __FUNCT__ "PCASMSetLocalSubdomains"
/*@C
    PCASMSetLocalSubdomains - Sets the local subdomains (for this processor
    only) for the additive Schwarz preconditioner.

    Collective on PC

    Input Parameters:
+   pc - the preconditioner context
.   n - the number of subdomains for this processor (default value = 1)
.   is - the index sets that define the subdomains for this processor
         (or PETSC_NULL for PETSc to determine subdomains)
-   is_local - the index sets that define the local part of each subdomain
         (or PETSC_NULL)

    Notes:
    The IS numbering is in the parallel, global numbering of the vector.

    By default the ASM preconditioner uses 1 block per processor.

    Use PCASMSetTotalSubdomains() to set the subdomains for all processors.

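    For example, a minimal sketch that supplies two user-defined subdomains
    per process (assuming the index sets is[0] and is[1] have already been
    created, e.g. with ISCreateGeneral()) might look like
.vb
      IS is[2];
      /* ... create is[0] and is[1] ... */
      ierr = PCASMSetLocalSubdomains(pc,2,is,PETSC_NULL);CHKERRQ(ierr);
.ve
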
    Level: advanced

.keywords: PC, ASM, set, local, subdomains, additive Schwarz

.seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetLocalSubdomains(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PetscErrorCode ierr,(*f)(PC,PetscInt,IS[],IS[]);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE,1);
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetLocalSubdomains_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,n,is,is_local);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMSetTotalSubdomains"
/*@C
    PCASMSetTotalSubdomains - Sets the subdomains for all processors for the
    additive Schwarz preconditioner.  Either all or no processors in the
    PC communicator must call this routine, with the same index sets.

    Collective on PC

    Input Parameters:
+   pc - the preconditioner context
.   n - the number of subdomains for all processors
.   is - the index sets that define the subdomains for all processors
         (or PETSC_NULL for PETSc to determine subdomains)
-   is_local - the index sets that define the local part of each subdomain
         (or PETSC_NULL)

    Options Database Key:
    To set the total number of subdomain blocks rather than specify the
    index sets, use the option
.    -pc_asm_blocks <blks> - Sets total blocks

    Notes:
    Currently you cannot use this to set the actual subdomains with the argument is.

    By default the ASM preconditioner uses 1 block per processor.

    These index sets cannot be destroyed until after completion of the
    linear solves for which the ASM preconditioner is being used.

    Use PCASMSetLocalSubdomains() to set local subdomains.

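    For example, to split the domain into 16 blocks spread evenly across all
    processes, letting PETSc construct the index sets, one might call
.vb
      ierr = PCASMSetTotalSubdomains(pc,16,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr);
.ve
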
    Level: advanced

.keywords: PC, ASM, set, total, global, subdomains, additive Schwarz

.seealso: PCASMSetLocalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D()
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetTotalSubdomains(PC pc,PetscInt N,IS is[],IS is_local[])
{
  PetscErrorCode ierr,(*f)(PC,PetscInt,IS[],IS[]);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE,1);
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetTotalSubdomains_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,N,is,is_local);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMSetOverlap"
/*@
    PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
    additive Schwarz preconditioner.  Either all or no processors in the
    PC communicator must call this routine.

    Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)

    Options Database Key:
.   -pc_asm_overlap <ovl> - Sets overlap

    Notes:
    By default the ASM preconditioner uses 1 block per processor.  To use
    multiple blocks per processor, see PCASMSetTotalSubdomains() and
    PCASMSetLocalSubdomains() (and the option -pc_asm_blocks <blks>).

    The overlap defaults to 1, so if one desires that no additional
    overlap be computed beyond what may have been set with a call to
    PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(), then ovl
    must be set to 0.  In particular, if one does not explicitly set
    the subdomains in an application code, then all overlap would be computed
    internally by PETSc, and using an overlap of 0 would result in an ASM
    variant that is equivalent to the block Jacobi preconditioner.

    Note that one can define initial index sets with any overlap via
    PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(); the routine
    PCASMSetOverlap() merely allows PETSc to extend that overlap further
    if desired.

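    As a sketch of the block Jacobi equivalence noted above, the following
    (or, equivalently, the options -pc_type asm -pc_asm_overlap 0) yields a
    block Jacobi-like method with one block per process:
.vb
      ierr = PCSetType(pc,PCASM);CHKERRQ(ierr);
      ierr = PCASMSetOverlap(pc,0);CHKERRQ(ierr);  /* no overlap extension */
.ve
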
    Level: intermediate

.keywords: PC, ASM, set, overlap

.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetOverlap(PC pc,PetscInt ovl)
{
  PetscErrorCode ierr,(*f)(PC,PetscInt);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE,1);
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetOverlap_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,ovl);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMSetType"
/*@
    PCASMSetType - Sets the type of restriction and interpolation used
    for local problems in the additive Schwarz method.

    Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   type - variant of ASM, one of
.vb
      PC_ASM_BASIC       - full interpolation and restriction
      PC_ASM_RESTRICT    - full restriction, local processor interpolation
      PC_ASM_INTERPOLATE - full interpolation, local processor restriction
      PC_ASM_NONE        - local processor restriction and interpolation
.ve

    Options Database Key:
.   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type

    Level: intermediate

.keywords: PC, ASM, set, type

.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D()
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetType(PC pc,PCASMType type)
{
  PetscErrorCode ierr,(*f)(PC,PCASMType);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE,1);
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetType_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,type);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMSetSortIndices"
/*@
    PCASMSetSortIndices - Determines whether subdomain indices are sorted.

    Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   doSort - sort the subdomain indices

    Level: intermediate

.keywords: PC, ASM, set, type

.seealso: PCASMSetLocalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D()
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetSortIndices(PC pc,PetscTruth doSort)
{
  PetscErrorCode ierr,(*f)(PC,PetscTruth);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE,1);
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetSortIndices_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,doSort);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMGetSubKSP"
/*@C
   PCASMGetSubKSP - Gets the local KSP contexts for all blocks on
   this processor.

   Collective on PC iff first_local is requested

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  n_local - the number of blocks on this processor or PETSC_NULL
.  first_local - the global number of the first block on this processor or PETSC_NULL,
                 all processors must request or all must pass PETSC_NULL
-  ksp - the array of KSP contexts

   Notes:
   After PCASMGetSubKSP() the array of KSP contexts must not be freed.

   Currently for some matrix implementations only 1 block per processor
   is supported.

   You must call KSPSetUp() before calling PCASMGetSubKSP().

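   For example, a sketch that sets ILU as the preconditioner on every local
   block after the outer solver has been set up (assuming ksp is the outer
   KSP context associated with pc):
.vb
      KSP      *subksp;
      PC       subpc;
      PetscInt i,nlocal;
      ierr = KSPSetUp(ksp);CHKERRQ(ierr);
      ierr = PCASMGetSubKSP(pc,&nlocal,PETSC_NULL,&subksp);CHKERRQ(ierr);
      for (i=0; i<nlocal; i++) {
        ierr = KSPGetPC(subksp[i],&subpc);CHKERRQ(ierr);
        ierr = PCSetType(subpc,PCILU);CHKERRQ(ierr);
      }
.ve
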
   Level: advanced

.keywords: PC, ASM, additive Schwarz, get, sub, KSP, context

.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMSetOverlap(),
          PCASMCreateSubdomains2D()
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
{
  PetscErrorCode ierr,(*f)(PC,PetscInt*,PetscInt*,KSP **);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE,1);
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMGetSubKSP_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,n_local,first_local,ksp);CHKERRQ(ierr);
  } else {
    SETERRQ(PETSC_ERR_ARG_WRONG,"Cannot get subksp for this type of PC");
  }
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------------------*/
/*MC
   PCASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
           its own KSP object.

   Options Database Keys:
+  -pc_asm_truelocal - Activates PCASMSetUseTrueLocal()
.  -pc_asm_blocks <blks> - Sets total blocks
.  -pc_asm_overlap <ovl> - Sets overlap
-  -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type

     IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
      will get different convergence rates due to the default option of -pc_asm_type restrict. Use
      -pc_asm_type basic to get the standard ASM.

   Notes: Each processor can have one or more blocks, but a block cannot be shared by more
     than one processor. Defaults to one block per processor.

     To set options on the solvers for each block, append -sub_ to all the KSP and PC
        options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly

     To set the options on the solvers separately for each block, call PCASMGetSubKSP()
         and set the options directly on the resulting KSP object (you can access its PC
         with KSPGetPC())

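   For example, a typical invocation with one ILU(1)-preconditioned block per
   process and overlap 2 might be (the executable name here is hypothetical)
.vb
     mpiexec -n 4 ./app -ksp_type gmres -pc_type asm -pc_asm_overlap 2 -sub_ksp_type preonly -sub_pc_type ilu -sub_pc_factor_levels 1
.ve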

   Level: beginner

   Concepts: additive Schwarz method

    References:
    An additive variant of the Schwarz alternating method for the case of many subregions,
    M. Dryja and O. B. Widlund, Courant Institute, New York University, Technical report.

    Domain Decompositions: Parallel Multilevel Methods for Elliptic Partial Differential Equations,
    Barry Smith, Petter Bjorstad, and William Gropp, Cambridge University Press, ISBN 0-521-49589-X.

.seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
           PCBJACOBI, PCASMSetUseTrueLocal(), PCASMGetSubKSP(), PCASMSetLocalSubdomains(),
           PCASMSetTotalSubdomains(), PCSetModifySubmatrices(), PCASMSetOverlap(), PCASMSetType()

M*/

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCCreate_ASM"
PetscErrorCode PETSCKSP_DLLEXPORT PCCreate_ASM(PC pc)
{
  PetscErrorCode ierr;
  PC_ASM         *osm;

  PetscFunctionBegin;
  ierr = PetscNewLog(pc,PC_ASM,&osm);CHKERRQ(ierr);
  osm->n                 = PETSC_DECIDE;
  osm->n_local           = 0;
  osm->n_local_true      = 0;
  osm->overlap           = 1;
  osm->ksp               = 0;
  osm->restriction       = 0;
  osm->localization      = 0;
  osm->prolongation      = 0;
  osm->x                 = 0;
  osm->y                 = 0;
  osm->y_local           = 0;
  osm->is                = 0;
  osm->is_local          = 0;
  osm->mat               = 0;
  osm->pmat              = 0;
  osm->type              = PC_ASM_RESTRICT;
  osm->same_local_solves = PETSC_TRUE;
  osm->sort_indices      = PETSC_TRUE;

  pc->data                   = (void*)osm;
  pc->ops->apply             = PCApply_ASM;
  pc->ops->applytranspose    = PCApplyTranspose_ASM;
  pc->ops->setup             = PCSetUp_ASM;
  pc->ops->destroy           = PCDestroy_ASM;
  pc->ops->setfromoptions    = PCSetFromOptions_ASM;
  pc->ops->setuponblocks     = PCSetUpOnBlocks_ASM;
  pc->ops->view              = PCView_ASM;
  pc->ops->applyrichardson   = 0;

  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetLocalSubdomains_C","PCASMSetLocalSubdomains_ASM",
                    PCASMSetLocalSubdomains_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetTotalSubdomains_C","PCASMSetTotalSubdomains_ASM",
                    PCASMSetTotalSubdomains_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetOverlap_C","PCASMSetOverlap_ASM",
                    PCASMSetOverlap_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetType_C","PCASMSetType_ASM",
                    PCASMSetType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetSortIndices_C","PCASMSetSortIndices_ASM",
                    PCASMSetSortIndices_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMGetSubKSP_C","PCASMGetSubKSP_ASM",
                    PCASMGetSubKSP_ASM);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END


#undef __FUNCT__
#define __FUNCT__ "PCASMCreateSubdomains"
/*@C
   PCASMCreateSubdomains - Creates the index sets for the overlapping Schwarz
   preconditioner for any problem on a general grid.

   Collective

   Input Parameters:
+  A - The global matrix operator
-  n - the number of local blocks

   Output Parameter:
.  outis - the array of index sets defining the subdomains

   Level: advanced

   Note: this generates nonoverlapping subdomains; PCASM will generate the overlap
    from these if you use PCASMSetLocalSubdomains()

    In the Fortran version you must provide the array outis[] already allocated of length n.

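    For example, a sketch that builds four local blocks for a matrix A and
    hands them to the preconditioner:
.vb
      IS *is;
      ierr = PCASMCreateSubdomains(A,4,&is);CHKERRQ(ierr);
      ierr = PCASMSetLocalSubdomains(pc,4,is,PETSC_NULL);CHKERRQ(ierr);
.ve
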
.keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid

.seealso: PCASMSetLocalSubdomains(), PCASMDestroySubdomains()
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCASMCreateSubdomains(Mat A, PetscInt n, IS* outis[])
{
  MatPartitioning           mpart;
  const char                *prefix;
  PetscErrorCode            (*f)(Mat,PetscTruth*,MatReuse,Mat*);
  PetscMPIInt               size;
  PetscInt                  i,j,rstart,rend,bs;
  PetscTruth                iscopy = PETSC_FALSE,isbaij = PETSC_FALSE,foundpart = PETSC_FALSE;
  Mat                       Ad = PETSC_NULL, adj;
  IS                        ispart,isnumb,*is;
  PetscErrorCode            ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_COOKIE,1);
  PetscValidPointer(outis,3);
  if (n < 1) {SETERRQ1(PETSC_ERR_ARG_WRONG,"number of local blocks must be > 0, n = %D",n);}

  /* Get prefix, row distribution, and block size */
  ierr = MatGetOptionsPrefix(A,&prefix);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
  ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
  if (rstart/bs*bs != rstart || rend/bs*bs != rend) {
    SETERRQ3(PETSC_ERR_ARG_WRONG,"bad row distribution [%D,%D) for matrix block size %D",rstart,rend,bs);
  }
  /* Get diagonal block from matrix if possible */
  ierr = MPI_Comm_size(((PetscObject)A)->comm,&size);CHKERRQ(ierr);
  ierr = PetscObjectQueryFunction((PetscObject)A,"MatGetDiagonalBlock_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(A,&iscopy,MAT_INITIAL_MATRIX,&Ad);CHKERRQ(ierr);
  } else if (size == 1) {
    iscopy = PETSC_FALSE; Ad = A;
  } else {
    iscopy = PETSC_FALSE; Ad = PETSC_NULL;
  }
  if (Ad) {
    ierr = PetscTypeCompare((PetscObject)Ad,MATSEQBAIJ,&isbaij);CHKERRQ(ierr);
    if (!isbaij) {ierr = PetscTypeCompare((PetscObject)Ad,MATSEQSBAIJ,&isbaij);CHKERRQ(ierr);}
  }
  if (Ad && n > 1) {
    PetscTruth match,done;
    /* Try to setup a good matrix partitioning if available */
    ierr = MatPartitioningCreate(PETSC_COMM_SELF,&mpart);CHKERRQ(ierr);
    ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
    ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr);
    ierr = PetscTypeCompare((PetscObject)mpart,MAT_PARTITIONING_CURRENT,&match);CHKERRQ(ierr);
    if (!match) {
      ierr = PetscTypeCompare((PetscObject)mpart,MAT_PARTITIONING_SQUARE,&match);CHKERRQ(ierr);
    }
    if (!match) { /* assume a "good" partitioner is available */
      PetscInt na,*ia,*ja;
      ierr = MatGetRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
      if (done) {
        /* Build adjacency matrix by hand. Unfortunately a call to
           MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
           remove the block-aij structure and we cannot expect
           MatPartitioning to split vertices as we need */
        PetscInt i,j,*row,len,nnz,cnt,*iia=0,*jja=0;
        nnz = 0;
        for (i=0; i<na; i++) { /* count number of nonzeros */
          len = ia[i+1] - ia[i];
          row = ja + ia[i];
          for (j=0; j<len; j++) {
            if (row[j] == i) { /* don't count diagonal */
              len--; break;
            }
          }
          nnz += len;
        }
        ierr = PetscMalloc((na+1)*sizeof(PetscInt),&iia);CHKERRQ(ierr);
        ierr = PetscMalloc((nnz)*sizeof(PetscInt),&jja);CHKERRQ(ierr);
        nnz    = 0;
        iia[0] = 0;
        for (i=0; i<na; i++) { /* fill adjacency */
          cnt = 0;
          len = ia[i+1] - ia[i];
          row = ja + ia[i];
          for (j=0; j<len; j++) {
            if (row[j] != i) { /* if not diagonal */
              jja[nnz+cnt++] = row[j];
            }
          }
          nnz += cnt;
          iia[i+1] = nnz;
        }
        /* Partitioning of the adjacency matrix */
        ierr = MatCreateMPIAdj(PETSC_COMM_SELF,na,na,iia,jja,PETSC_NULL,&adj);CHKERRQ(ierr);
        ierr = MatPartitioningSetAdjacency(mpart,adj);CHKERRQ(ierr);
        ierr = MatPartitioningSetNParts(mpart,n);CHKERRQ(ierr);
        ierr = MatPartitioningApply(mpart,&ispart);CHKERRQ(ierr);
        ierr = ISPartitioningToNumbering(ispart,&isnumb);CHKERRQ(ierr);
        ierr = MatDestroy(adj);CHKERRQ(ierr);
        foundpart = PETSC_TRUE;
      }
      ierr = MatRestoreRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
    }
    ierr = MatPartitioningDestroy(mpart);CHKERRQ(ierr);
  }
  if (iscopy) {ierr = MatDestroy(Ad);CHKERRQ(ierr);}

  ierr = PetscMalloc(n*sizeof(IS),&is);CHKERRQ(ierr);
  *outis = is;

  if (!foundpart) {

    /* Partitioning by contiguous chunks of rows */

    PetscInt mbs   = (rend-rstart)/bs;
    PetscInt start = rstart;
    for (i=0; i<n; i++) {
      PetscInt count = (mbs/n + ((mbs % n) > i)) * bs;
      ierr   = ISCreateStride(PETSC_COMM_SELF,count,start,1,&is[i]);CHKERRQ(ierr);
      start += count;
    }

  } else {

    /* Partitioning by adjacency of diagonal block */

    const PetscInt *numbering;
    PetscInt       *count,nidx,*indices,*newidx,start=0;
    /* Get node count in each partition */
    ierr = PetscMalloc(n*sizeof(PetscInt),&count);CHKERRQ(ierr);
    ierr = ISPartitioningCount(ispart,n,count);CHKERRQ(ierr);
    if (isbaij && bs > 1) { /* adjust for the block-aij case */
      for (i=0; i<n; i++) count[i] *= bs;
    }
    /* Build indices from node numbering */
    ierr = ISGetLocalSize(isnumb,&nidx);CHKERRQ(ierr);
    ierr = PetscMalloc(nidx*sizeof(PetscInt),&indices);CHKERRQ(ierr);
    for (i=0; i<nidx; i++) indices[i] = i; /* needs to be initialized */
    ierr = ISGetIndices(isnumb,&numbering);CHKERRQ(ierr);
    ierr = PetscSortIntWithPermutation(nidx,numbering,indices);CHKERRQ(ierr);
    ierr = ISRestoreIndices(isnumb,&numbering);CHKERRQ(ierr);
    if (isbaij && bs > 1) { /* adjust for the block-aij case */
      ierr = PetscMalloc(nidx*bs*sizeof(PetscInt),&newidx);CHKERRQ(ierr);
      for (i=0; i<nidx; i++)
        for (j=0; j<bs; j++)
          newidx[i*bs+j] = indices[i]*bs + j;
      ierr = PetscFree(indices);CHKERRQ(ierr);
      nidx   *= bs;
      indices = newidx;
    }
    /* Shift to get global indices */
    for (i=0; i<nidx; i++) indices[i] += rstart;

    /* Build the index sets for each block */
    for (i=0; i<n; i++) {
      ierr   = ISCreateGeneral(PETSC_COMM_SELF,count[i],&indices[start],&is[i]);CHKERRQ(ierr);
      ierr   = ISSort(is[i]);CHKERRQ(ierr);
      start += count[i];
    }

    ierr = PetscFree(count);CHKERRQ(ierr);
    ierr = PetscFree(indices);CHKERRQ(ierr);
    ierr = ISDestroy(isnumb);CHKERRQ(ierr);
    ierr = ISDestroy(ispart);CHKERRQ(ierr);

  }

  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMDestroySubdomains"
/*@C
   PCASMDestroySubdomains - Destroys the index sets created with
   PCASMCreateSubdomains(). Should be called after setting subdomains
   with PCASMSetLocalSubdomains().

   Collective

   Input Parameters:
+  n - the number of index sets
.  is - the array of index sets
-  is_local - the array of local index sets, can be PETSC_NULL

   Level: advanced

.keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid

.seealso: PCASMCreateSubdomains(), PCASMSetLocalSubdomains()
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCASMDestroySubdomains(PetscInt n, IS is[], IS is_local[])
{
  PetscInt       i;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (n <= 0) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"n must be > 0: n = %D",n);
  PetscValidPointer(is,2);
  for (i=0; i<n; i++) { ierr = ISDestroy(is[i]);CHKERRQ(ierr); }
  ierr = PetscFree(is);CHKERRQ(ierr);
  if (is_local) {
    PetscValidPointer(is_local,3);
    for (i=0; i<n; i++) { ierr = ISDestroy(is_local[i]);CHKERRQ(ierr); }
    ierr = PetscFree(is_local);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMCreateSubdomains2D"
/*@
   PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
   preconditioner for a two-dimensional problem on a regular grid.

   Not Collective

   Input Parameters:
+  m, n - the number of mesh points in the x and y directions
.  M, N - the number of subdomains in the x and y directions
.  dof - degrees of freedom per node
-  overlap - overlap in mesh lines

   Output Parameters:
+  Nsub - the number of subdomains created
-  is - the array of index sets defining the subdomains

   Note:
   Presently PCASMCreateSubdomains2D() is valid only for sequential
   preconditioners.  More general related routines are
   PCASMSetTotalSubdomains() and PCASMSetLocalSubdomains().

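   For example, to cover a 64 x 64 mesh (one degree of freedom per node) with
   a 4 x 4 array of subdomains, each extended by one mesh line of overlap:
.vb
      IS       *is;
      PetscInt Nsub;
      ierr = PCASMCreateSubdomains2D(64,64,4,4,1,1,&Nsub,&is);CHKERRQ(ierr);
.ve
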
   Level: advanced

.keywords: PC, ASM, additive Schwarz, create, subdomains, 2D, regular grid

.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
          PCASMSetOverlap()
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCASMCreateSubdomains2D(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt dof,PetscInt overlap,PetscInt *Nsub,IS **is)
{
  PetscInt       i,j,height,width,ystart,xstart,yleft,yright,xleft,xright,loc_outer;
  PetscErrorCode ierr;
  PetscInt       nidx,*idx,loc,ii,jj,count;

  PetscFunctionBegin;
  if (dof != 1) SETERRQ(PETSC_ERR_SUP," ");

  *Nsub = N*M;
  ierr = PetscMalloc((*Nsub)*sizeof(IS *),is);CHKERRQ(ierr);
  ystart = 0;
  loc_outer = 0;
  for (i=0; i<N; i++) {
    height = n/N + ((n % N) > i); /* height of subdomain */
    if (height < 2) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Too many N subdomains for mesh dimension n");
    yleft  = ystart - overlap; if (yleft < 0) yleft = 0;
    yright = ystart + height + overlap; if (yright > n) yright = n;
    xstart = 0;
    for (j=0; j<M; j++) {
      width = m/M + ((m % M) > j); /* width of subdomain */
      if (width < 2) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Too many M subdomains for mesh dimension m");
      xleft  = xstart - overlap; if (xleft < 0) xleft = 0;
      xright = xstart + width + overlap; if (xright > m) xright = m;
      nidx   = (xright - xleft)*(yright - yleft);
      ierr   = PetscMalloc(nidx*sizeof(PetscInt),&idx);CHKERRQ(ierr);
      loc    = 0;
      for (ii=yleft; ii<yright; ii++) {
        count = m*ii + xleft;
        for (jj=xleft; jj<xright; jj++) {
          idx[loc++] = count++;
        }
      }
      ierr = ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,(*is)+loc_outer++);CHKERRQ(ierr);
      ierr = PetscFree(idx);CHKERRQ(ierr);
      xstart += width;
    }
    ystart += height;
  }
  for (i=0; i<*Nsub; i++) { ierr = ISSort((*is)[i]);CHKERRQ(ierr); }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMGetLocalSubdomains"
/*@C
    PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
    only) for the additive Schwarz preconditioner.

    Collective on PC

    Input Parameter:
.   pc - the preconditioner context

    Output Parameters:
+   n - the number of subdomains for this processor (default value = 1)
.   is - the index sets that define the subdomains for this processor
-   is_local - the index sets that define the local part of the subdomains for this processor (can be PETSC_NULL)

    Notes:
    The IS numbering is in the parallel, global numbering of the vector.

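    For example, one might inspect the (overlapping) subdomains after setup:
.vb
      IS       *is;
      PetscInt i,n;
      ierr = PCASMGetLocalSubdomains(pc,&n,&is,PETSC_NULL);CHKERRQ(ierr);
      for (i=0; i<n; i++) {ierr = ISView(is[i],PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr);}
.ve
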
    Level: advanced

.keywords: PC, ASM, set, local, subdomains, additive Schwarz

.seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubmatrices()
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCASMGetLocalSubdomains(PC pc,PetscInt *n,IS *is[],IS *is_local[])
{
  PC_ASM         *osm;
  PetscErrorCode ierr;
  PetscTruth     match;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE,1);
  PetscValidIntPointer(n,2);
  if (is) PetscValidPointer(is,3);
  ierr = PetscTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
  if (!match) {
    if (n)  *n  = 0;
    if (is) *is = PETSC_NULL;
  } else {
    osm = (PC_ASM*)pc->data;
    if (n)  *n  = osm->n_local_true;
    if (is) *is = osm->is;
    if (is_local) *is_local = osm->is_local;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMGetLocalSubmatrices"
/*@C
    PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
    only) for the additive Schwarz preconditioner.

    Collective on PC

    Input Parameter:
.   pc - the preconditioner context

    Output Parameters:
+   n - the number of matrices for this processor (default value = 1)
-   mat - the matrices

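    For example, after KSPSetUp() or PCSetUp() one could view each local
    submatrix:
.vb
      Mat      *mats;
      PetscInt i,n;
      ierr = PCASMGetLocalSubmatrices(pc,&n,&mats);CHKERRQ(ierr);
      for (i=0; i<n; i++) {ierr = MatView(mats[i],PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr);}
.ve
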
    Level: advanced

.keywords: PC, ASM, set, local, subdomains, additive Schwarz, block Jacobi

.seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCASMGetLocalSubmatrices(PC pc,PetscInt *n,Mat *mat[])
{
  PC_ASM         *osm;
  PetscErrorCode ierr;
  PetscTruth     match;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE,1);
  PetscValidIntPointer(n,2);
  if (mat) PetscValidPointer(mat,3);
  if (!pc->setupcalled) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUp() or PCSetUp().");
  ierr = PetscTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
  if (!match) {
    if (n)   *n   = 0;
    if (mat) *mat = PETSC_NULL;
  } else {
    osm = (PC_ASM*)pc->data;
    if (n)   *n   = osm->n_local_true;
    if (mat) *mat = osm->pmat;
  }
  PetscFunctionReturn(0);
}
1412