/* xref: /petsc/src/ksp/pc/impls/asm/asm.c (revision ba8a68b80012e103e52d15749afb2a3a8a06877f) */
#define PETSCKSP_DLL

/*
  This file defines an additive Schwarz preconditioner for any Mat implementation.

  Note that each processor may have any number of subdomains. But in order to
  deal easily with the VecScatter(), we treat each processor as if it has the
  same number of subdomains.

       n - total number of true subdomains on all processors
       n_local_true - actual number of subdomains on this processor
       n_local = maximum over all processors of n_local_true
*/
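
/*
   Example layout (hypothetical): with two processes where rank 0 owns three
   subdomains and rank 1 owns one, n = 4, n_local_true is 3 on rank 0 and 1 on
   rank 1, and n_local = 3 on both ranks; ranks with fewer true subdomains are
   padded with empty subdomains so the VecScatter loops below match up.
*/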
#include "private/pcimpl.h"     /*I "petscpc.h" I*/

typedef struct {
  PetscInt   n, n_local, n_local_true;
  PetscInt   overlap;             /* overlap requested by user */
  KSP        *ksp;                /* linear solvers for each block */
  VecScatter *restriction;        /* mapping from global to subregion */
  VecScatter *localization;       /* mapping from overlapping to non-overlapping subregion */
  VecScatter *prolongation;       /* mapping from subregion to global */
  Vec        *x,*y,*y_local;      /* work vectors */
  IS         *is;                 /* index set that defines each overlapping subdomain */
  IS         *is_local;           /* index set that defines each non-overlapping subdomain, may be NULL */
  Mat        *mat,*pmat;          /* mat is not currently used */
  PCASMType  type;                /* use reduced interpolation, restriction or both */
  PetscTruth type_set;            /* if user set this value (so won't change it for symmetric problems) */
  PetscTruth same_local_solves;   /* flag indicating whether all local solvers are same */
  PetscTruth sort_indices;        /* flag to sort subdomain indices */
} PC_ASM;
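
/*
   A sketch of how the scatters and vectors are used for one application of
   the preconditioner (see PCApply_ASM() below):

     restriction[i]  : global x            -> x[i]       (overlapping subdomain rhs)
     ksp[i]          : solve pmat[i]*y[i] = x[i]         (local subdomain solve)
     localization[i] : overlapping y[i]    -> y_local[i] (non-overlapping part)
     prolongation[i] : y_local[i]          -> global y   (added into the result)

   When is_local is NULL, the localization step is skipped and y_local[i]
   simply references y[i].
*/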

#undef __FUNCT__
#define __FUNCT__ "PCView_ASM"
static PetscErrorCode PCView_ASM(PC pc,PetscViewer viewer)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  PetscInt       i,bsz;
  PetscTruth     iascii,isstring;
  PetscViewer    sviewer;

  PetscFunctionBegin;
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_STRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    if (osm->n > 0) {
      ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: total subdomain blocks = %D, amount of overlap = %D\n",osm->n,osm->overlap);CHKERRQ(ierr);
    } else {
      ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: total subdomain blocks not yet set, amount of overlap = %D\n",osm->overlap);CHKERRQ(ierr);
    }
    ierr = PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: restriction/interpolation type - %s\n",PCASMTypes[osm->type]);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
    if (osm->same_local_solves) {
      if (osm->ksp) {
        ierr = PetscViewerASCIIPrintf(viewer,"  Local solve is same for all blocks, in the following KSP and PC objects:\n");CHKERRQ(ierr);
        ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
        if (!rank) {
          ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
          ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);
          ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
        }
        ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
      }
    } else {
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"  [%d] number of local blocks = %D\n",(int)rank,osm->n_local_true);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"  Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
      for (i=0; i<osm->n_local; i++) {
        ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
        if (i < osm->n_local_true) {
          ierr = ISGetLocalSize(osm->is[i],&bsz);CHKERRQ(ierr);
          ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"[%d] local block number %D, size = %D\n",(int)rank,i,bsz);CHKERRQ(ierr);
          ierr = KSPView(osm->ksp[i],sviewer);CHKERRQ(ierr);
          ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
        }
        ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
      }
      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    ierr = PetscViewerStringSPrintf(viewer," blocks=%D, overlap=%D, type=%s",osm->n,osm->overlap,PCASMTypes[osm->type]);CHKERRQ(ierr);
    ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
    if (osm->ksp) {ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);}
    ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
  } else {
    SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported for PCASM",((PetscObject)viewer)->type_name);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMPrintSubdomains"
static PetscErrorCode PCASMPrintSubdomains(PC pc)
{
  PC_ASM         *osm  = (PC_ASM*)pc->data;
  const char     *prefix;
  char           fname[PETSC_MAX_PATH_LEN+1];
  PetscViewer    viewer;
  PetscInt       i,j,nidx;
  const PetscInt *idx;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
  ierr = PetscOptionsGetString(prefix,"-pc_asm_print_subdomains",fname,PETSC_MAX_PATH_LEN,PETSC_NULL);CHKERRQ(ierr);
  if (fname[0] == 0) { ierr = PetscStrcpy(fname,"stdout");CHKERRQ(ierr); }
  ierr = PetscViewerASCIIOpen(((PetscObject)pc)->comm,fname,&viewer);CHKERRQ(ierr);
  for (i=0; i<osm->n_local_true; i++) {
    ierr = ISGetLocalSize(osm->is[i],&nidx);CHKERRQ(ierr);
    ierr = ISGetIndices(osm->is[i],&idx);CHKERRQ(ierr);
    for (j=0; j<nidx; j++) {
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"%D ",idx[j]);CHKERRQ(ierr);
    }
    ierr = ISRestoreIndices(osm->is[i],&idx);CHKERRQ(ierr);
    ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
    if (osm->is_local) {
      ierr = ISGetLocalSize(osm->is_local[i],&nidx);CHKERRQ(ierr);
      ierr = ISGetIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
      for (j=0; j<nidx; j++) {
        ierr = PetscViewerASCIISynchronizedPrintf(viewer,"%D ",idx[j]);CHKERRQ(ierr);
      }
      ierr = ISRestoreIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
    }
  }
  ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
  ierr = PetscViewerDestroy(viewer);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetUp_ASM"
static PetscErrorCode PCSetUp_ASM(PC pc)
{
  PC_ASM         *osm  = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscTruth     symset,flg;
  PetscInt       i,m,m_local,firstRow,lastRow;
  PetscMPIInt    size;
  MatReuse       scall = MAT_REUSE_MATRIX;
  IS             isl;
  KSP            ksp;
  PC             subpc;
  const char     *prefix,*pprefix;
  Vec            vec;

  PetscFunctionBegin;
  if (!pc->setupcalled) {

    if (!osm->type_set) {
      ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
      if (symset && flg) { osm->type = PC_ASM_BASIC; }
    }

    if (osm->n == PETSC_DECIDE && osm->n_local_true < 1) {
      /* no subdomains given, use one per processor */
      osm->n_local = osm->n_local_true = 1;
      ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
      osm->n = size;
    } else if (osm->n == PETSC_DECIDE) {
      /* determine global number of subdomains */
      PetscInt inwork[2],outwork[2];
      inwork[0] = inwork[1] = osm->n_local_true;
      ierr = MPI_Allreduce(inwork,outwork,1,MPIU_2INT,PetscMaxSum_Op,((PetscObject)pc)->comm);CHKERRQ(ierr);
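      /* PetscMaxSum_Op combines MPIU_2INT pairs in a single reduction: the
         first entry of outwork becomes the maximum of the inputs' first
         entries and the second becomes the sum of their second entries, so
         one call yields both n_local (the max) and n (the sum) */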
      osm->n_local = outwork[0];
      osm->n       = outwork[1];
    }

    if (!osm->is) { /* create the index sets */
      ierr = PCASMCreateSubdomains(pc->pmat,osm->n_local_true,&osm->is);CHKERRQ(ierr);
    }
    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    flg  = PETSC_FALSE;
    ierr = PetscOptionsGetTruth(prefix,"-pc_asm_print_subdomains",&flg,PETSC_NULL);CHKERRQ(ierr);
    if (flg) { ierr = PCASMPrintSubdomains(pc);CHKERRQ(ierr); }

    /*  Extend the "overlapping" regions by a number of steps  */
    ierr = MatIncreaseOverlap(pc->pmat,osm->n_local_true,osm->is,osm->overlap);CHKERRQ(ierr);
    if (osm->sort_indices) {
      for (i=0; i<osm->n_local_true; i++) {
        ierr = ISSort(osm->is[i]);CHKERRQ(ierr);
        if (osm->is_local) {
          ierr = ISSort(osm->is_local[i]);CHKERRQ(ierr);
        }
      }
    }

    /* Create the local work vectors and scatter contexts */
    ierr = MatGetVecs(pc->pmat,&vec,0);CHKERRQ(ierr);
    ierr = PetscMalloc(osm->n_local*sizeof(VecScatter),&osm->restriction);CHKERRQ(ierr);
    if (osm->is_local) {ierr = PetscMalloc(osm->n_local*sizeof(VecScatter),&osm->localization);CHKERRQ(ierr);}
    ierr = PetscMalloc(osm->n_local*sizeof(VecScatter),&osm->prolongation);CHKERRQ(ierr);
    ierr = PetscMalloc(osm->n_local*sizeof(Vec),&osm->x);CHKERRQ(ierr);
    ierr = PetscMalloc(osm->n_local*sizeof(Vec),&osm->y);CHKERRQ(ierr);
    ierr = PetscMalloc(osm->n_local*sizeof(Vec),&osm->y_local);CHKERRQ(ierr);
    ierr = VecGetOwnershipRange(vec, &firstRow, &lastRow);CHKERRQ(ierr);
    for (i=0; i<osm->n_local_true; ++i, firstRow += m_local) {
      ierr = ISGetLocalSize(osm->is[i],&m);CHKERRQ(ierr);
      ierr = VecCreateSeq(PETSC_COMM_SELF,m,&osm->x[i]);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,osm->is[i],osm->x[i],isl,&osm->restriction[i]);CHKERRQ(ierr);
      ierr = ISDestroy(isl);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
      if (osm->is_local) {
        IS             isShift;
        const PetscInt *idx_local;
        PetscInt       *idx, j;

        ierr = ISGetLocalSize(osm->is_local[i],&m_local);CHKERRQ(ierr);
        ierr = ISGetIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr);
        ierr = PetscMalloc(m*sizeof(PetscInt), &idx);CHKERRQ(ierr);
        for (j = 0; j < m_local; ++j) {
          idx[j] = idx_local[j] - firstRow;
        }
        ierr = ISRestoreIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr);
        ierr = ISCreateGeneral(PETSC_COMM_SELF,m_local,idx,&isShift);CHKERRQ(ierr);
        ierr = PetscFree(idx);CHKERRQ(ierr);
        ierr = ISCreateStride(PETSC_COMM_SELF,m_local,0,1,&isl);CHKERRQ(ierr);
        ierr = VecCreateSeq(PETSC_COMM_SELF,m_local,&osm->y_local[i]);CHKERRQ(ierr);
        ierr = VecScatterCreate(osm->y[i],isShift,osm->y_local[i],isl,&osm->localization[i]);CHKERRQ(ierr);
        ierr = ISDestroy(isShift);CHKERRQ(ierr);

        ierr = VecScatterCreate(vec,osm->is_local[i],osm->y_local[i],isl,&osm->prolongation[i]);CHKERRQ(ierr);
        ierr = ISDestroy(isl);CHKERRQ(ierr);
      } else {
        ierr = VecGetLocalSize(vec,&m_local);CHKERRQ(ierr);
        osm->y_local[i] = osm->y[i];
        ierr = PetscObjectReference((PetscObject) osm->y[i]);CHKERRQ(ierr);
        osm->prolongation[i] = osm->restriction[i];
        ierr = PetscObjectReference((PetscObject) osm->restriction[i]);CHKERRQ(ierr);
      }
    }
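    /* each true subdomain advanced firstRow by the size of its non-overlapping
       part, so the non-overlapping subdomains must exactly tile this process's
       ownership range; this is verified next */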
    if (firstRow != lastRow) SETERRQ2(PETSC_ERR_PLIB, "Specified ASM subdomain sizes were invalid: %D != %D", firstRow, lastRow);
    for (i=osm->n_local_true; i<osm->n_local; i++) {
      ierr = VecCreateSeq(PETSC_COMM_SELF,0,&osm->x[i]);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr);
      ierr = VecDuplicate(osm->x[i],&osm->y_local[i]);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&isl);CHKERRQ(ierr);
      ierr = VecScatterCreate(vec,isl,osm->x[i],isl,&osm->restriction[i]);CHKERRQ(ierr);
      osm->prolongation[i] = osm->restriction[i];
      ierr = PetscObjectReference((PetscObject) osm->restriction[i]);CHKERRQ(ierr);
      ierr = ISDestroy(isl);CHKERRQ(ierr);
    }
    ierr = VecDestroy(vec);CHKERRQ(ierr);

    /* Create the local solvers */
    ierr = PetscMalloc(osm->n_local_true*sizeof(KSP),&osm->ksp);CHKERRQ(ierr);
    for (i=0; i<osm->n_local_true; i++) {
      ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
      ierr = PetscLogObjectParent(pc,ksp);CHKERRQ(ierr);
      ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
      ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
      ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
      ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
      ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
      ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
      osm->ksp[i] = ksp;
    }
    scall = MAT_INITIAL_MATRIX;

  } else {
    /*
       Destroy the blocks from the previous iteration
    */
    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
      scall = MAT_INITIAL_MATRIX;
    }
  }

  /*
     Extract out the submatrices
  */
  ierr = MatGetSubMatrices(pc->pmat,osm->n_local_true,osm->is,osm->is,scall,&osm->pmat);CHKERRQ(ierr);
  if (scall == MAT_INITIAL_MATRIX) {
    ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
    for (i=0; i<osm->n_local_true; i++) {
      ierr = PetscLogObjectParent(pc,osm->pmat[i]);CHKERRQ(ierr);
      ierr = PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);CHKERRQ(ierr);
    }
  }

  /* Return control to the user so that the submatrices can be modified (e.g., to apply
     different boundary conditions for the submatrices than for the global problem) */
  ierr = PCModifySubMatrices(pc,osm->n_local_true,osm->is,osm->is,osm->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);

  /*
     Loop over subdomains putting them into local ksp
  */
  for (i=0; i<osm->n_local_true; i++) {
    ierr = KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i],pc->flag);CHKERRQ(ierr);
    if (!pc->setupcalled) {
      ierr = KSPSetFromOptions(osm->ksp[i]);CHKERRQ(ierr);
    }
  }

  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetUpOnBlocks_ASM"
static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  for (i=0; i<osm->n_local_true; i++) {
    ierr = KSPSetUp(osm->ksp[i]);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApply_ASM"
static PetscErrorCode PCApply_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local = osm->n_local,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).
  */
  if (!(osm->type & PC_ASM_RESTRICT)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local_true; i++) {
      ierr = VecSet(osm->x[i],0.0);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    reverse = SCATTER_REVERSE_LOCAL;
  }

  for (i=0; i<n_local; i++) {
    ierr = VecScatterBegin(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
  }
  ierr = VecSet(y,0.0);CHKERRQ(ierr);
  /* do the local solves */
  for (i=0; i<n_local_true; i++) {
    ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = KSPSolve(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
    if (osm->localization) {
      ierr = VecScatterBegin(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    }
    ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  /* handle the rest of the scatters that do not have local solves */
  for (i=n_local_true; i<n_local; i++) {
    ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  for (i=0; i<n_local; i++) {
    ierr = VecScatterEnd(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
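
/*
   In matrix notation, the basic variant of PCApply_ASM() computes

       y = sum_i R_i^T A_i^{-1} R_i x

   where R_i restricts a global vector to the i-th overlapping subdomain and
   A_i = R_i A R_i^T is the subdomain matrix solved by osm->ksp[i].  Clearing
   the PC_ASM_INTERPOLATE bit makes the reverse (ADD_VALUES) scatter local,
   i.e. a restricted prolongation, which yields the default restricted
   additive Schwarz variant (PC_ASM_RESTRICT); clearing PC_ASM_RESTRICT
   instead makes the forward (restriction) scatter local.
*/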

#undef __FUNCT__
#define __FUNCT__ "PCApplyTranspose_ASM"
static PetscErrorCode PCApplyTranspose_ASM(PC pc,Vec x,Vec y)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local = osm->n_local,n_local_true = osm->n_local_true;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).

     Note: these are reversed from PCApply_ASM() because we are applying the
     transpose of the three terms
  */
  if (!(osm->type & PC_ASM_INTERPOLATE)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    for (i=0; i<n_local_true; i++) {
      ierr = VecSet(osm->x[i],0.0);CHKERRQ(ierr);
    }
  }
  if (!(osm->type & PC_ASM_RESTRICT)) {
    reverse = SCATTER_REVERSE_LOCAL;
  }

  for (i=0; i<n_local; i++) {
    ierr = VecScatterBegin(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
  }
  ierr = VecSet(y,0.0);CHKERRQ(ierr);
  /* do the local solves */
  for (i=0; i<n_local_true; i++) {
    ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = KSPSolveTranspose(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
    if (osm->localization) {
      ierr = VecScatterBegin(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
      ierr = VecScatterEnd(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    }
    ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  /* handle the rest of the scatters that do not have local solves */
  for (i=n_local_true; i<n_local; i++) {
    ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr);
    ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  for (i=0; i<n_local; i++) {
    ierr = VecScatterEnd(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCDestroy_ASM"
static PetscErrorCode PCDestroy_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  if (osm->ksp) {
    for (i=0; i<osm->n_local_true; i++) {
      ierr = KSPDestroy(osm->ksp[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree(osm->ksp);CHKERRQ(ierr);
  }
  if (osm->pmat) {
    if (osm->n_local_true > 0) {
      ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr);
    }
  }
  if (osm->restriction) {
    for (i=0; i<osm->n_local; i++) {
      ierr = VecScatterDestroy(osm->restriction[i]);CHKERRQ(ierr);
      if (osm->localization) {ierr = VecScatterDestroy(osm->localization[i]);CHKERRQ(ierr);}
      ierr = VecScatterDestroy(osm->prolongation[i]);CHKERRQ(ierr);
      ierr = VecDestroy(osm->x[i]);CHKERRQ(ierr);
      ierr = VecDestroy(osm->y[i]);CHKERRQ(ierr);
      ierr = VecDestroy(osm->y_local[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree(osm->restriction);CHKERRQ(ierr);
    if (osm->localization) {ierr = PetscFree(osm->localization);CHKERRQ(ierr);}
    ierr = PetscFree(osm->prolongation);CHKERRQ(ierr);
    ierr = PetscFree(osm->x);CHKERRQ(ierr);
    ierr = PetscFree(osm->y);CHKERRQ(ierr);
    ierr = PetscFree(osm->y_local);CHKERRQ(ierr);
  }
  if (osm->is) {
    ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
  }
  ierr = PetscFree(osm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetFromOptions_ASM"
static PetscErrorCode PCSetFromOptions_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       blocks,ovl;
  PetscTruth     symset,flg;
  PCASMType      asmtype;

  PetscFunctionBegin;
  /* set the type to symmetric if matrix is symmetric */
  if (!osm->type_set && pc->pmat) {
    ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
    if (symset && flg) { osm->type = PC_ASM_BASIC; }
  }
  ierr = PetscOptionsHead("Additive Schwarz options");CHKERRQ(ierr);
    ierr = PetscOptionsInt("-pc_asm_blocks","Number of subdomains","PCASMSetTotalSubdomains",osm->n,&blocks,&flg);CHKERRQ(ierr);
    if (flg) {ierr = PCASMSetTotalSubdomains(pc,blocks,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr); }
    ierr = PetscOptionsInt("-pc_asm_overlap","Number of grid points overlap","PCASMSetOverlap",osm->overlap,&ovl,&flg);CHKERRQ(ierr);
    if (flg) {ierr = PCASMSetOverlap(pc,ovl);CHKERRQ(ierr); }
    flg  = PETSC_FALSE;
    ierr = PetscOptionsEnum("-pc_asm_type","Type of restriction/extension","PCASMSetType",PCASMTypes,(PetscEnum)osm->type,(PetscEnum*)&asmtype,&flg);CHKERRQ(ierr);
    if (flg) {ierr = PCASMSetType(pc,asmtype);CHKERRQ(ierr); }
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*------------------------------------------------------------------------------------*/

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCASMSetLocalSubdomains_ASM"
PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetLocalSubdomains_ASM(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  if (n < 1) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 1 or more blocks, n = %D",n);
  if (pc->setupcalled && (n != osm->n_local_true || is)) {
    SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"PCASMSetLocalSubdomains() should be called before calling PCSetUp().");
  }
  if (!pc->setupcalled) {
    if (is) {
      for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is[i]);CHKERRQ(ierr);}
    }
    if (is_local) {
      for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is_local[i]);CHKERRQ(ierr);}
    }
    if (osm->is) {
      ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
    }
    osm->n_local_true = n;
    osm->is           = 0;
    osm->is_local     = 0;
    if (is) {
      ierr = PetscMalloc(n*sizeof(IS),&osm->is);CHKERRQ(ierr);
      for (i=0; i<n; i++) { osm->is[i] = is[i]; }
    }
    if (is_local) {
      ierr = PetscMalloc(n*sizeof(IS),&osm->is_local);CHKERRQ(ierr);
      for (i=0; i<n; i++) { osm->is_local[i] = is_local[i]; }
    }
  }
  PetscFunctionReturn(0);
}
EXTERN_C_END

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCASMSetTotalSubdomains_ASM"
PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetTotalSubdomains_ASM(PC pc,PetscInt N,IS *is,IS *is_local)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscMPIInt    rank,size;
  PetscInt       n;

  PetscFunctionBegin;
  if (N < 1) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Number of total blocks must be > 0, N = %D",N);
  if (is) SETERRQ(PETSC_ERR_SUP,"Use PCASMSetLocalSubdomains() to set specific index sets;\nthey cannot be set globally yet.");

  /*
     Split the subdomains equally among all processors
  */
  ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
  n = N/size + ((N % size) > rank);
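  /* the first N % size ranks each take one extra block so that the N blocks
     are spread as evenly as possible across the communicator */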
  if (!n) SETERRQ3(PETSC_ERR_ARG_OUTOFRANGE,"Process %d must have at least one block: total processors %d total blocks %D",(int)rank,(int)size,N);
  if (pc->setupcalled && n != osm->n_local_true) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"PCASMSetTotalSubdomains() should be called before PCSetUp().");
  if (!pc->setupcalled) {
    if (osm->is) {
      ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);
    }
    osm->n_local_true = n;
    osm->is           = 0;
    osm->is_local     = 0;
  }
  PetscFunctionReturn(0);
}
EXTERN_C_END

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCASMSetOverlap_ASM"
PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetOverlap_ASM(PC pc,PetscInt ovl)
{
  PC_ASM *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  if (ovl < 0) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
  if (pc->setupcalled && ovl != osm->overlap) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"PCASMSetOverlap() should be called before PCSetUp().");
  if (!pc->setupcalled) {
    osm->overlap = ovl;
  }
  PetscFunctionReturn(0);
}
EXTERN_C_END

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCASMSetType_ASM"
PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetType_ASM(PC pc,PCASMType type)
{
  PC_ASM *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  osm->type     = type;
  osm->type_set = PETSC_TRUE;
  PetscFunctionReturn(0);
}
EXTERN_C_END

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCASMSetSortIndices_ASM"
PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetSortIndices_ASM(PC pc,PetscTruth doSort)
{
  PC_ASM *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  osm->sort_indices = doSort;
  PetscFunctionReturn(0);
}
EXTERN_C_END

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCASMGetSubKSP_ASM"
PetscErrorCode PETSCKSP_DLLEXPORT PCASMGetSubKSP_ASM(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (osm->n_local_true < 1) {
    SETERRQ(PETSC_ERR_ORDER,"Need to call PCSetUp() on PC (or KSPSetUp() on the outer KSP object) before calling here");
  }

  if (n_local) {
    *n_local = osm->n_local_true;
  }
  if (first_local) {
    ierr = MPI_Scan(&osm->n_local_true,first_local,1,MPIU_INT,MPI_SUM,((PetscObject)pc)->comm);CHKERRQ(ierr);
    *first_local -= osm->n_local_true;
  }
  if (ksp) {
    /* Assume that local solves are now different; not necessarily
       true though!  This flag is used only for PCView_ASM() */
    *ksp                   = osm->ksp;
    osm->same_local_solves = PETSC_FALSE;
  }
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCASMSetLocalSubdomains"
/*@C
    PCASMSetLocalSubdomains - Sets the local subdomains (for this processor
    only) for the additive Schwarz preconditioner.

    Collective on PC

    Input Parameters:
+   pc - the preconditioner context
.   n - the number of subdomains for this processor (default value = 1)
.   is - the index sets that define the subdomains for this processor
         (or PETSC_NULL for PETSc to determine subdomains)
-   is_local - the index sets that define the local part of the subdomains for this processor
         (or PETSC_NULL to use the default of 1 subdomain per process)

    Notes:
    The IS numbering is in the parallel, global numbering of the vector.

    By default the ASM preconditioner uses 1 block per processor.

    These index sets cannot be destroyed until after completion of the
    linear solves for which the ASM preconditioner is being used.

    Use PCASMSetTotalSubdomains() to set the subdomains for all processors.

    Level: advanced

.keywords: PC, ASM, set, local, subdomains, additive Schwarz

.seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetLocalSubdomains(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PetscErrorCode ierr,(*f)(PC,PetscInt,IS[],IS[]);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE,1);
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetLocalSubdomains_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,n,is,is_local);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMSetTotalSubdomains"
/*@C
    PCASMSetTotalSubdomains - Sets the subdomains for all processors for the
    additive Schwarz preconditioner.  Either all or no processors in the
    PC communicator must call this routine, with the same index sets.

    Collective on PC

    Input Parameters:
+   pc - the preconditioner context
.   N - the number of subdomains for all processors
.   is - the index sets that define the subdomains for all processors
         (or PETSC_NULL for PETSc to determine subdomains)
-   is_local - the index sets that define the local part of the subdomains for this processor
         (or PETSC_NULL to use the default of 1 subdomain per process)

    Options Database Key:
    To set the total number of subdomain blocks rather than specify the
    index sets, use the option
.    -pc_asm_blocks <blks> - Sets total blocks

    Notes:
    Currently you cannot use this to set the actual subdomains with the argument is.

    By default the ASM preconditioner uses 1 block per processor.

    These index sets cannot be destroyed until after completion of the
    linear solves for which the ASM preconditioner is being used.

    Use PCASMSetLocalSubdomains() to set local subdomains.

    Level: advanced

.keywords: PC, ASM, set, total, global, subdomains, additive Schwarz

.seealso: PCASMSetLocalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D()
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetTotalSubdomains(PC pc,PetscInt N,IS is[],IS is_local[])
{
  PetscErrorCode ierr,(*f)(PC,PetscInt,IS[],IS[]);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE,1);
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetTotalSubdomains_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,N,is,is_local);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMSetOverlap"
/*@
    PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
    additive Schwarz preconditioner.  Either all or no processors in the
    PC communicator must call this routine.

    Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)

    Options Database Key:
.   -pc_asm_overlap <ovl> - Sets overlap

    Notes:
    By default the ASM preconditioner uses 1 block per processor.  To use
    multiple blocks per processor, see PCASMSetTotalSubdomains() and
    PCASMSetLocalSubdomains() (and the option -pc_asm_blocks <blks>).

    The overlap defaults to 1, so if one desires that no additional
    overlap be computed beyond what may have been set with a call to
    PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(), then ovl
    must be set to 0.  In particular, if one does not explicitly set
    the subdomains in the application code, then all overlap is computed
    internally by PETSc, and using an overlap of 0 results in an ASM
    variant that is equivalent to the block Jacobi preconditioner.

    Note that one can define initial index sets with any overlap via
    PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(); the routine
    PCASMSetOverlap() merely allows PETSc to extend that overlap further
    if desired.
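
    A minimal call sequence (a sketch; ksp and pc are hypothetical variables):
.vb
      KSPCreate(PETSC_COMM_WORLD,&ksp);
      KSPGetPC(ksp,&pc);
      PCSetType(pc,PCASM);
      PCASMSetOverlap(pc,2);    /* extend each subdomain by two layers */
.ve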

    Level: intermediate

.keywords: PC, ASM, set, overlap

.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetOverlap(PC pc,PetscInt ovl)
{
  PetscErrorCode ierr,(*f)(PC,PetscInt);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE,1);
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetOverlap_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,ovl);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMSetType"
/*@
    PCASMSetType - Sets the type of restriction and interpolation used
    for local problems in the additive Schwarz method.

    Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   type - variant of ASM, one of
.vb
      PC_ASM_BASIC       - full interpolation and restriction
      PC_ASM_RESTRICT    - full restriction, local processor interpolation
      PC_ASM_INTERPOLATE - full interpolation, local processor restriction
      PC_ASM_NONE        - local processor restriction and interpolation
.ve

    Options Database Key:
.   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type

    Level: intermediate

.keywords: PC, ASM, set, type

.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D()
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetType(PC pc,PCASMType type)
{
  PetscErrorCode ierr,(*f)(PC,PCASMType);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE,1);
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetType_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,type);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMSetSortIndices"
/*@
    PCASMSetSortIndices - Determines whether subdomain indices are sorted.

    Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   doSort - sort the subdomain indices

    Level: intermediate

.keywords: PC, ASM, set, type

.seealso: PCASMSetLocalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D()
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCASMSetSortIndices(PC pc,PetscTruth doSort)
{
  PetscErrorCode ierr,(*f)(PC,PetscTruth);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE,1);
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMSetSortIndices_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,doSort);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMGetSubKSP"
/*@C
   PCASMGetSubKSP - Gets the local KSP contexts for all blocks on
   this processor.

   Collective on PC iff first_local is requested

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  n_local - the number of blocks on this processor or PETSC_NULL
.  first_local - the global number of the first block on this processor or PETSC_NULL,
                 all processors must request or all must pass PETSC_NULL
-  ksp - the array of KSP contexts

   Note:
   After PCASMGetSubKSP() the array of KSP contexts is not to be freed by the user.

   Currently for some matrix implementations only 1 block per processor
   is supported.

   You must call KSPSetUp() before calling PCASMGetSubKSP().
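
   A typical usage sketch (hypothetical variable names):
.vb
      KSPSetUp(ksp);                              /* or PCSetUp(pc) */
      PCASMGetSubKSP(pc,&nlocal,&first,&subksp);
      for (i=0; i<nlocal; i++) {
        KSPGetPC(subksp[i],&subpc);
        PCSetType(subpc,PCILU);                   /* e.g. ILU on each block */
      }
.ve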

   Level: advanced

.keywords: PC, ASM, additive Schwarz, get, sub, KSP, context

.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMSetOverlap(),
          PCASMCreateSubdomains2D()
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
{
  PetscErrorCode ierr,(*f)(PC,PetscInt*,PetscInt*,KSP **);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE,1);
  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCASMGetSubKSP_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,n_local,first_local,ksp);CHKERRQ(ierr);
  } else {
    SETERRQ(PETSC_ERR_ARG_WRONG,"Cannot get subksp for this type of PC");
  }
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------------------*/
/*MC
   PCASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
           its own KSP object.

   Options Database Keys:
+  -pc_asm_blocks <blks> - Sets total blocks
.  -pc_asm_overlap <ovl> - Sets overlap
-  -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type

     IMPORTANT: If you run with, for example, 3 blocks on 1 processor versus 3 blocks on 3 processors you
      will get different convergence rates due to the default option of -pc_asm_type restrict. Use
      -pc_asm_type basic to use the standard ASM.

   Notes: Each processor can have one or more blocks, but a block cannot be shared by more
     than one processor. Defaults to one block per processor.

     To set options on the solvers for all blocks, prefix the KSP and PC
        options database keys with -sub_. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly

     To set the options on the solvers separately for each block, call PCASMGetSubKSP()
         and set the options directly on the resulting KSP object (you can access its PC
         with KSPGetPC()), as sketched below.
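
     For example (a sketch; subksp and subpc are hypothetical local variables):
.vb
       KSPSetUp(ksp);
       PCASMGetSubKSP(pc,&nlocal,PETSC_NULL,&subksp);
       KSPGetPC(subksp[0],&subpc);
       PCSetType(subpc,PCLU);     /* direct solver on the first local block */
.ve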

   Level: beginner

   Concepts: additive Schwarz method

    References:
    An additive variant of the Schwarz alternating method for the case of many subregions,
    M. Dryja and O. B. Widlund, Courant Institute, New York University, Technical Report.

    Domain Decompositions: Parallel Multilevel Methods for Elliptic Partial Differential Equations,
    Barry Smith, Petter Bjorstad, and William Gropp, Cambridge University Press, ISBN 0-521-49589-X.

.seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
           PCBJACOBI, PCASMGetSubKSP(), PCASMSetLocalSubdomains(),
           PCASMSetTotalSubdomains(), PCSetModifySubmatrices(), PCASMSetOverlap(), PCASMSetType()

M*/

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCCreate_ASM"
PetscErrorCode PETSCKSP_DLLEXPORT PCCreate_ASM(PC pc)
{
  PetscErrorCode ierr;
  PC_ASM         *osm;

  PetscFunctionBegin;
  ierr = PetscNewLog(pc,PC_ASM,&osm);CHKERRQ(ierr);
  osm->n                 = PETSC_DECIDE;
  osm->n_local           = 0;
  osm->n_local_true      = 0;
  osm->overlap           = 1;
  osm->ksp               = 0;
  osm->restriction       = 0;
  osm->localization      = 0;
  osm->prolongation      = 0;
  osm->x                 = 0;
  osm->y                 = 0;
  osm->y_local           = 0;
  osm->is                = 0;
  osm->is_local          = 0;
  osm->mat               = 0;
  osm->pmat              = 0;
  osm->type              = PC_ASM_RESTRICT;
  osm->same_local_solves = PETSC_TRUE;
  osm->sort_indices      = PETSC_TRUE;

  pc->data                   = (void*)osm;
  pc->ops->apply             = PCApply_ASM;
  pc->ops->applytranspose    = PCApplyTranspose_ASM;
  pc->ops->setup             = PCSetUp_ASM;
  pc->ops->destroy           = PCDestroy_ASM;
  pc->ops->setfromoptions    = PCSetFromOptions_ASM;
  pc->ops->setuponblocks     = PCSetUpOnBlocks_ASM;
  pc->ops->view              = PCView_ASM;
  pc->ops->applyrichardson   = 0;

  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetLocalSubdomains_C","PCASMSetLocalSubdomains_ASM",
                    PCASMSetLocalSubdomains_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetTotalSubdomains_C","PCASMSetTotalSubdomains_ASM",
                    PCASMSetTotalSubdomains_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetOverlap_C","PCASMSetOverlap_ASM",
                    PCASMSetOverlap_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetType_C","PCASMSetType_ASM",
                    PCASMSetType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMSetSortIndices_C","PCASMSetSortIndices_ASM",
                    PCASMSetSortIndices_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCASMGetSubKSP_C","PCASMGetSubKSP_ASM",
                    PCASMGetSubKSP_ASM);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCASMCreateSubdomains"
/*@C
   PCASMCreateSubdomains - Creates the index sets for the overlapping Schwarz
   preconditioner for any problem on a general grid.

   Collective

   Input Parameters:
+  A - The global matrix operator
-  n - the number of local blocks

   Output Parameters:
.  outis - the array of index sets defining the subdomains

   Level: advanced

   Note: this generates nonoverlapping subdomains; PCASM will generate the overlap
    from these if you use them with PCASMSetLocalSubdomains()

    In the Fortran version you must provide the array outis[] already allocated with length n.
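
    A usage sketch (hypothetical; four blocks on this process):
.vb
      IS *is;
      PCASMCreateSubdomains(A,4,&is);
      PCASMSetLocalSubdomains(pc,4,is,PETSC_NULL);
      ...  /* solve */
      PCASMDestroySubdomains(4,is,PETSC_NULL);
.ve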

.keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid

.seealso: PCASMSetLocalSubdomains(), PCASMDestroySubdomains()
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCASMCreateSubdomains(Mat A, PetscInt n, IS* outis[])
{
  MatPartitioning           mpart;
  const char                *prefix;
  PetscErrorCode            (*f)(Mat,PetscTruth*,MatReuse,Mat*);
  PetscMPIInt               size;
  PetscInt                  i,j,rstart,rend,bs;
  PetscTruth                iscopy = PETSC_FALSE,isbaij = PETSC_FALSE,foundpart = PETSC_FALSE;
  Mat                       Ad = PETSC_NULL, adj;
  IS                        ispart,isnumb,*is;
  PetscErrorCode            ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_COOKIE,1);
  PetscValidPointer(outis,3);
  if (n < 1) {SETERRQ1(PETSC_ERR_ARG_WRONG,"number of local blocks must be > 0, n = %D",n);}

  /* Get prefix, row distribution, and block size */
  ierr = MatGetOptionsPrefix(A,&prefix);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
  ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
  if (rstart/bs*bs != rstart || rend/bs*bs != rend) {
    SETERRQ3(PETSC_ERR_ARG_WRONG,"bad row distribution [%D,%D) for matrix block size %D",rstart,rend,bs);
  }
  /* Get diagonal block from matrix if possible */
  ierr = MPI_Comm_size(((PetscObject)A)->comm,&size);CHKERRQ(ierr);
  ierr = PetscObjectQueryFunction((PetscObject)A,"MatGetDiagonalBlock_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(A,&iscopy,MAT_INITIAL_MATRIX,&Ad);CHKERRQ(ierr);
  } else if (size == 1) {
    iscopy = PETSC_FALSE; Ad = A;
  } else {
    iscopy = PETSC_FALSE; Ad = PETSC_NULL;
  }
  if (Ad) {
    ierr = PetscTypeCompare((PetscObject)Ad,MATSEQBAIJ,&isbaij);CHKERRQ(ierr);
    if (!isbaij) {ierr = PetscTypeCompare((PetscObject)Ad,MATSEQSBAIJ,&isbaij);CHKERRQ(ierr);}
  }
  if (Ad && n > 1) {
    PetscTruth match,done;
    /* Try to setup a good matrix partitioning if available */
    ierr = MatPartitioningCreate(PETSC_COMM_SELF,&mpart);CHKERRQ(ierr);
    ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
    ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr);
    ierr = PetscTypeCompare((PetscObject)mpart,MAT_PARTITIONING_CURRENT,&match);CHKERRQ(ierr);
    if (!match) {
      ierr = PetscTypeCompare((PetscObject)mpart,MAT_PARTITIONING_SQUARE,&match);CHKERRQ(ierr);
    }
    if (!match) { /* assume a "good" partitioner is available */
      PetscInt na,*ia,*ja;
      ierr = MatGetRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
      if (done) {
        /* Build adjacency matrix by hand. Unfortunately a call to
           MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
           remove the block-aij structure and we cannot expect
           MatPartitioning to split vertices as we need */
        PetscInt i,j,*row,len,nnz,cnt,*iia=0,*jja=0;
        nnz = 0;
        for (i=0; i<na; i++) { /* count number of nonzeros */
          len = ia[i+1] - ia[i];
          row = ja + ia[i];
          for (j=0; j<len; j++) {
            if (row[j] == i) { /* don't count diagonal */
              len--; break;
            }
          }
          nnz += len;
        }
        ierr = PetscMalloc((na+1)*sizeof(PetscInt),&iia);CHKERRQ(ierr);
        ierr = PetscMalloc((nnz)*sizeof(PetscInt),&jja);CHKERRQ(ierr);
        nnz    = 0;
        iia[0] = 0;
        for (i=0; i<na; i++) { /* fill adjacency */
          cnt = 0;
          len = ia[i+1] - ia[i];
          row = ja + ia[i];
          for (j=0; j<len; j++) {
            if (row[j] != i) { /* if not diagonal */
              jja[nnz+cnt++] = row[j];
            }
          }
          nnz += cnt;
          iia[i+1] = nnz;
        }
        /* Partitioning of the adjacency matrix */
        ierr = MatCreateMPIAdj(PETSC_COMM_SELF,na,na,iia,jja,PETSC_NULL,&adj);CHKERRQ(ierr);
        ierr = MatPartitioningSetAdjacency(mpart,adj);CHKERRQ(ierr);
        ierr = MatPartitioningSetNParts(mpart,n);CHKERRQ(ierr);
        ierr = MatPartitioningApply(mpart,&ispart);CHKERRQ(ierr);
        ierr = ISPartitioningToNumbering(ispart,&isnumb);CHKERRQ(ierr);
        ierr = MatDestroy(adj);CHKERRQ(ierr);
        foundpart = PETSC_TRUE;
      }
      ierr = MatRestoreRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
    }
    ierr = MatPartitioningDestroy(mpart);CHKERRQ(ierr);
  }
  if (iscopy) {ierr = MatDestroy(Ad);CHKERRQ(ierr);}

  ierr = PetscMalloc(n*sizeof(IS),&is);CHKERRQ(ierr);
  *outis = is;

  if (!foundpart) {

    /* Partitioning by contiguous chunks of rows */

    PetscInt mbs   = (rend-rstart)/bs;
    PetscInt start = rstart;
    for (i=0; i<n; i++) {
      PetscInt count = (mbs/n + ((mbs % n) > i)) * bs;
      ierr   = ISCreateStride(PETSC_COMM_SELF,count,start,1,&is[i]);CHKERRQ(ierr);
      start += count;
    }

  } else {

    /* Partitioning by adjacency of diagonal block  */

    const PetscInt *numbering;
    PetscInt       *count,nidx,*indices,*newidx,start=0;
    /* Get node count in each partition */
    ierr = PetscMalloc(n*sizeof(PetscInt),&count);CHKERRQ(ierr);
    ierr = ISPartitioningCount(ispart,n,count);CHKERRQ(ierr);
    if (isbaij && bs > 1) { /* adjust for the block-aij case */
      for (i=0; i<n; i++) count[i] *= bs;
    }
    /* Build indices from node numbering */
    ierr = ISGetLocalSize(isnumb,&nidx);CHKERRQ(ierr);
    ierr = PetscMalloc(nidx*sizeof(PetscInt),&indices);CHKERRQ(ierr);
    for (i=0; i<nidx; i++) indices[i] = i; /* needs to be initialized */
    ierr = ISGetIndices(isnumb,&numbering);CHKERRQ(ierr);
    ierr = PetscSortIntWithPermutation(nidx,numbering,indices);CHKERRQ(ierr);
    ierr = ISRestoreIndices(isnumb,&numbering);CHKERRQ(ierr);
    if (isbaij && bs > 1) { /* adjust for the block-aij case */
      ierr = PetscMalloc(nidx*bs*sizeof(PetscInt),&newidx);CHKERRQ(ierr);
      for (i=0; i<nidx; i++) {
        for (j=0; j<bs; j++) {
          newidx[i*bs+j] = indices[i]*bs + j;
        }
      }
      ierr = PetscFree(indices);CHKERRQ(ierr);
      nidx   *= bs;
      indices = newidx;
    }
    /* Shift to get global indices */
    for (i=0; i<nidx; i++) indices[i] += rstart;

    /* Build the index sets for each block */
    for (i=0; i<n; i++) {
      ierr   = ISCreateGeneral(PETSC_COMM_SELF,count[i],&indices[start],&is[i]);CHKERRQ(ierr);
      ierr   = ISSort(is[i]);CHKERRQ(ierr);
      start += count[i];
    }

    ierr = PetscFree(count);CHKERRQ(ierr);
    ierr = PetscFree(indices);CHKERRQ(ierr);
    ierr = ISDestroy(isnumb);CHKERRQ(ierr);
    ierr = ISDestroy(ispart);CHKERRQ(ierr);

  }

  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMDestroySubdomains"
/*@C
   PCASMDestroySubdomains - Destroys the index sets created with
   PCASMCreateSubdomains(). Should be called after setting subdomains
   with PCASMSetLocalSubdomains().

   Collective

   Input Parameters:
+  n - the number of index sets
.  is - the array of index sets
-  is_local - the array of local index sets, can be PETSC_NULL

   Level: advanced

.keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid

.seealso: PCASMCreateSubdomains(), PCASMSetLocalSubdomains()
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCASMDestroySubdomains(PetscInt n, IS is[], IS is_local[])
{
  PetscInt       i;
  PetscErrorCode ierr;
  PetscFunctionBegin;
  if (n <= 0) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"n must be > 0: n = %D",n);
  PetscValidPointer(is,2);
  for (i=0; i<n; i++) { ierr = ISDestroy(is[i]);CHKERRQ(ierr); }
  ierr = PetscFree(is);CHKERRQ(ierr);
  if (is_local) {
    PetscValidPointer(is_local,3);
    for (i=0; i<n; i++) { ierr = ISDestroy(is_local[i]);CHKERRQ(ierr); }
    ierr = PetscFree(is_local);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMCreateSubdomains2D"
/*@
   PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
   preconditioner for a two-dimensional problem on a regular grid.

   Not Collective

   Input Parameters:
+  m, n - the number of mesh points in the x and y directions
.  M, N - the number of subdomains in the x and y directions
.  dof - degrees of freedom per node
-  overlap - overlap in mesh lines

   Output Parameters:
+  Nsub - the number of subdomains created
-  is - the array of index sets defining the subdomains

   Note:
   Presently PCASMCreateSubdomains2D() is valid only for sequential
   preconditioners.  More general related routines are
   PCASMSetTotalSubdomains() and PCASMSetLocalSubdomains().
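
   Example (hypothetical): a 10 x 10 grid split into 2 x 2 subdomains with one
   mesh line of overlap gives Nsub = 4:
.vb
      IS       *is;
      PetscInt Nsub;
      PCASMCreateSubdomains2D(10,10,2,2,1,1,&Nsub,&is);
      PCASMSetLocalSubdomains(pc,Nsub,is,PETSC_NULL);
.ve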

   Level: advanced

.keywords: PC, ASM, additive Schwarz, create, subdomains, 2D, regular grid

.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
          PCASMSetOverlap()
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCASMCreateSubdomains2D(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt dof,PetscInt overlap,PetscInt *Nsub,IS **is)
{
  PetscInt       i,j,height,width,ystart,xstart,yleft,yright,xleft,xright,loc_outer;
  PetscErrorCode ierr;
  PetscInt       nidx,*idx,loc,ii,jj,count;

  PetscFunctionBegin;
  if (dof != 1) SETERRQ(PETSC_ERR_SUP,"Currently only supports one degree of freedom per node (dof = 1)");

  *Nsub = N*M;
  ierr = PetscMalloc((*Nsub)*sizeof(IS),is);CHKERRQ(ierr);
  ystart = 0;
  loc_outer = 0;
  for (i=0; i<N; i++) {
    height = n/N + ((n % N) > i); /* height of subdomain */
    if (height < 2) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Too many N subdomains for mesh dimension n");
    yleft  = ystart - overlap; if (yleft < 0) yleft = 0;
    yright = ystart + height + overlap; if (yright > n) yright = n;
    xstart = 0;
    for (j=0; j<M; j++) {
      width = m/M + ((m % M) > j); /* width of subdomain */
      if (width < 2) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Too many M subdomains for mesh dimension m");
      xleft  = xstart - overlap; if (xleft < 0) xleft = 0;
      xright = xstart + width + overlap; if (xright > m) xright = m;
      nidx   = (xright - xleft)*(yright - yleft);
      ierr   = PetscMalloc(nidx*sizeof(PetscInt),&idx);CHKERRQ(ierr);
      loc    = 0;
      for (ii=yleft; ii<yright; ii++) {
        count = m*ii + xleft;
        for (jj=xleft; jj<xright; jj++) {
          idx[loc++] = count++;
        }
      }
      ierr = ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,(*is)+loc_outer++);CHKERRQ(ierr);
      ierr = PetscFree(idx);CHKERRQ(ierr);
      xstart += width;
    }
    ystart += height;
  }
  for (i=0; i<*Nsub; i++) { ierr = ISSort((*is)[i]);CHKERRQ(ierr); }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMGetLocalSubdomains"
/*@C
    PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
    only) for the additive Schwarz preconditioner.

    Collective on PC

    Input Parameter:
.   pc - the preconditioner context

    Output Parameters:
+   n - the number of subdomains for this processor (default value = 1)
.   is - the index sets that define the subdomains for this processor
-   is_local - the index sets that define the local part of the subdomains for this processor (can be PETSC_NULL)

    Notes:
    The IS numbering is in the parallel, global numbering of the vector.

    Level: advanced

.keywords: PC, ASM, set, local, subdomains, additive Schwarz

.seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubmatrices()
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCASMGetLocalSubdomains(PC pc,PetscInt *n,IS *is[],IS *is_local[])
{
  PC_ASM         *osm;
  PetscErrorCode ierr;
  PetscTruth     match;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE,1);
  PetscValidIntPointer(n,2);
  if (is) PetscValidPointer(is,3);
  ierr = PetscTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
  if (!match) {
    if (n)  *n  = 0;
    if (is) *is = PETSC_NULL;
  } else {
    osm = (PC_ASM*)pc->data;
    if (n)  *n  = osm->n_local_true;
    if (is) *is = osm->is;
    if (is_local) *is_local = osm->is_local;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCASMGetLocalSubmatrices"
/*@C
    PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
    only) for the additive Schwarz preconditioner.

    Collective on PC

    Input Parameter:
.   pc - the preconditioner context

    Output Parameters:
+   n - the number of matrices for this processor (default value = 1)
-   mat - the matrices

    Level: advanced

.keywords: PC, ASM, set, local, subdomains, additive Schwarz, block Jacobi

.seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCASMGetLocalSubmatrices(PC pc,PetscInt *n,Mat *mat[])
{
  PC_ASM         *osm;
  PetscErrorCode ierr;
  PetscTruth     match;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_COOKIE,1);
  PetscValidIntPointer(n,2);
  if (mat) PetscValidPointer(mat,3);
  if (!pc->setupcalled) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUp() or PCSetUp().");
  ierr = PetscTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr);
  if (!match) {
    if (n)   *n   = 0;
    if (mat) *mat = PETSC_NULL;
  } else {
    osm = (PC_ASM*)pc->data;
    if (n)   *n   = osm->n_local_true;
    if (mat) *mat = osm->pmat;
  }
  PetscFunctionReturn(0);
}
1410