xref: /petsc/src/ksp/pc/impls/gasm/gasm.c (revision c10bc1a15c3326c73cfa28ba5063292eba8d33d9)
1 #define PETSCKSP_DLL
2 
3 /*
4   This file defines a "generalized" additive Schwarz preconditioner for any Mat implementation.
5   In this version each processor may have any number of subdomains and a subdomain may live on multiple
6   processors.
7 
8        N    - total number of true subdomains on all processors
9        n    - actual number of subdomains on this processor
10        nmax - maximum number of subdomains per processor
          (e.g., with two processors owning 2 and 3 subdomains respectively: N = 5, n = 2 or 3, and nmax = 3)
11 */
12 #include "private/pcimpl.h"     /*I "petscpc.h" I*/
13 
14 typedef struct {
15   PetscInt   N,n,nmax;
16   PetscInt   overlap;             /* overlap requested by user */
17   KSP        *ksp;                /* linear solvers for each block */
18   Vec        gx,gy;               /* Merged work vectors */
19   Vec        *x,*y;               /* Split work vectors; storage aliases pieces of storage of the above merged vectors. */
20   IS         gis, gis_local;      /* merged ISs */
21   VecScatter grestriction;        /* merged restriction */
22   VecScatter gprolongation;       /* merged prolongation */
23   IS         *is;                 /* index set that defines each overlapping subdomain */
24   IS         *is_local;           /* index set that defines each local subdomain (same as subdomain with the overlap removed); may be NULL */
25   Mat        *pmat;               /* subdomain block matrices */
26   PCGASMType  type;               /* use reduced interpolation, restriction or both */
27   PetscBool  type_set;            /* if user set this value (so won't change it for symmetric problems) */
28   PetscBool  same_local_solves;   /* flag indicating whether all local solvers are same */
29   PetscBool  sort_indices;        /* flag to sort subdomain indices */
30 } PC_GASM;
31 
32 #undef __FUNCT__
33 #define __FUNCT__ "PCView_GASM"
34 static PetscErrorCode PCView_GASM(PC pc,PetscViewer viewer)
35 {
36   PC_GASM         *osm = (PC_GASM*)pc->data;
37   PetscErrorCode ierr;
38   PetscMPIInt    rank;
39   PetscInt       i,bsz;
40   PetscBool      iascii,isstring;
41   PetscViewer    sviewer;
42 
43 
44   PetscFunctionBegin;
45   ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
46   ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
47   if (iascii) {
48     char overlaps[256] = "user-defined overlap",blocks[256] = "total subdomain blocks not yet set";
49     if (osm->overlap >= 0) {ierr = PetscSNPrintf(overlaps,sizeof overlaps,"amount of overlap = %D",osm->overlap);CHKERRQ(ierr);}
50     if (osm->nmax > 0)     {ierr = PetscSNPrintf(blocks,sizeof blocks,"max number of subdomain blocks = %D",osm->nmax);CHKERRQ(ierr);}
51     ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr); /* rank must be set before it is used in the synchronized print below */
52     ierr = PetscViewerASCIIPrintf(viewer,"  Generalized additive Schwarz: %s, %s\n",blocks,overlaps);CHKERRQ(ierr);
53     ierr = PetscViewerASCIIPrintf(viewer,"  Generalized additive Schwarz: restriction/interpolation type - %s\n",PCGASMTypes[osm->type]);CHKERRQ(ierr);
54     ierr = PetscViewerASCIISynchronizedPrintf(viewer,"  [%d] number of local blocks = %D\n",(int)rank,osm->n);CHKERRQ(ierr);
55     if (osm->same_local_solves) {
56       if (osm->ksp) {
57         ierr = PetscViewerASCIIPrintf(viewer,"  Local solve is same for all blocks, in the following KSP and PC objects:\n");CHKERRQ(ierr);
58         ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
59         if (!rank) {
60           ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
61           ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);
62           ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
63         }
64         ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
65       }
66     } else {
67       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
68       ierr = PetscViewerASCIIPrintf(viewer,"  Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr);
69       ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
70       ierr = PetscViewerASCIIPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
71       for (i=0; i<osm->nmax; i++) {
72         ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
73         if (i < osm->n) {
74           ierr = ISGetLocalSize(osm->is[i],&bsz);CHKERRQ(ierr);
75           ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"[%d] local block number %D, size = %D\n",(int)rank,i,bsz);CHKERRQ(ierr);
76           ierr = KSPView(osm->ksp[i],sviewer);CHKERRQ(ierr);
77           ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
78         }
79         ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
80       }
81       ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
82       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
83     }
84   } else if (isstring) {
85     ierr = PetscViewerStringSPrintf(viewer," blocks=%D, overlap=%D, type=%s",osm->n,osm->overlap,PCGASMTypes[osm->type]);CHKERRQ(ierr);
86     ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
87     if (osm->ksp) {ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);}
88     ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
89   } else {
90     SETERRQ1(((PetscObject)pc)->comm,PETSC_ERR_SUP,"Viewer type %s not supported for PCGASM",((PetscObject)viewer)->type_name);
91   }
92   PetscFunctionReturn(0);
93 }
94 
95 #undef __FUNCT__
96 #define __FUNCT__ "PCGASMPrintSubdomains"
97 static PetscErrorCode PCGASMPrintSubdomains(PC pc)
98 {
99   PC_GASM         *osm  = (PC_GASM*)pc->data;
100   const char     *prefix;
101   char           fname[PETSC_MAX_PATH_LEN+1];
102   PetscViewer    viewer;
103   PetscInt       i,j,nidx;
104   const PetscInt *idx;
105   PetscErrorCode ierr;
106 
107   PetscFunctionBegin;
108   ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
109   ierr = PetscOptionsGetString(prefix,"-pc_gasm_print_subdomains",fname,PETSC_MAX_PATH_LEN,PETSC_NULL);CHKERRQ(ierr);
110   if (fname[0] == 0) { ierr = PetscStrcpy(fname,"stdout");CHKERRQ(ierr); }
111   ierr = PetscViewerASCIIOpen(((PetscObject)pc)->comm,fname,&viewer);CHKERRQ(ierr);
112   for (i=0;i<osm->n;++i) {
113     ierr = ISGetLocalSize(osm->is[i],&nidx);CHKERRQ(ierr);
114     ierr = ISGetIndices(osm->is[i],&idx);CHKERRQ(ierr);
115     for (j=0; j<nidx; j++) {
116       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"%D ",idx[j]);CHKERRQ(ierr);
117     }
118     ierr = ISRestoreIndices(osm->is[i],&idx);CHKERRQ(ierr);
119     ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
120     if (osm->is_local) {
121       ierr = ISGetLocalSize(osm->is_local[i],&nidx);CHKERRQ(ierr);
122       ierr = ISGetIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
123       for (j=0; j<nidx; j++) {
124         ierr = PetscViewerASCIISynchronizedPrintf(viewer,"%D ",idx[j]);CHKERRQ(ierr);
125       }
126       ierr = ISRestoreIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
127       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
128     }
129   }
130   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
131   ierr = PetscViewerDestroy(viewer);CHKERRQ(ierr);
132   PetscFunctionReturn(0);
133 }
134 
135 #undef __FUNCT__
136 #define __FUNCT__ "PCSetUp_GASM"
137 static PetscErrorCode PCSetUp_GASM(PC pc)
138 {
139   PC_GASM         *osm  = (PC_GASM*)pc->data;
140   PetscErrorCode ierr;
141   PetscBool      symset,flg;
142   PetscInt       i,firstRow,lastRow;
143   PetscMPIInt    rank, size;
144   MatReuse       scall = MAT_REUSE_MATRIX;
145   KSP            ksp;
146   PC             subpc;
147   const char     *prefix,*pprefix;
148   PetscInt       dn;       /* Number of indices in a single subdomain assigned to this processor. */
149   const PetscInt *didx;    /* Indices from a single subdomain assigned to this processor. */
150   PetscInt       ddn;      /* Number of indices in all subdomains assigned to this processor. */
151   PetscInt       *ddidx;   /* Indices of all subdomains assigned to this processor. */
152   IS             gid;      /* Identity IS of the size of all subdomains assigned to this processor. */
153   Vec            x,y;
154   PetscScalar    *gxarray, *gyarray;
155 
156   PetscFunctionBegin;
157   ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
158   ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
159   if (!pc->setupcalled) {
160 
161     if (!osm->type_set) {
162       ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
163       if (symset && flg) { osm->type = PC_GASM_BASIC; }
164     }
165 
166     if (osm->N == PETSC_DECIDE && osm->n < 1) {
167       /* no subdomains given, use one per processor */
168       osm->nmax = osm->n = 1;
169       ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
170       osm->N = size;
171     } else if (osm->N == PETSC_DECIDE) {
172       PetscInt inwork[2], outwork[2];
173       /* determine global number of subdomains and the max number of local subdomains */
174       inwork[0] = inwork[1] = osm->n;
175       ierr = MPI_Allreduce(inwork,outwork,1,MPIU_2INT,PetscMaxSum_Op,((PetscObject)pc)->comm);CHKERRQ(ierr);
176       osm->nmax = outwork[0];
177       osm->N    = outwork[1];
178     }
179     if (!osm->is){ /* create the index sets */
180       ierr = PCGASMCreateSubdomains(pc->pmat,osm->n,&osm->is);CHKERRQ(ierr);
181     }
182     if (!osm->is_local) {
183       /*
184 	 This indicates that osm->is should define a nonoverlapping decomposition
185 	 (there is no way to really guarantee that if subdomains are set by the user through PCGASMSetLocalSubdomains,
186 	  but the assumption is that either the user does the right thing, or subdomains in osm->is have been created
187 	  via PCGASMCreateSubdomains, which guarantees a nonoverlapping decomposition).
188 	 Therefore, osm->is will be used to define osm->is_local.
189 	 If a nonzero overlap has been requested by the user, then osm->is will be expanded and will overlap,
190 	 so osm->is_local should obtain a copy of osm->is while they are still (presumably) nonoverlapping.
191 	 Otherwise (no overlap has been requested), osm->is_local are simply aliases for osm->is.
192       */
193       ierr = PetscMalloc(osm->n*sizeof(IS),&osm->is_local);CHKERRQ(ierr);
194       for (i=0; i<osm->n; i++) {
195         if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
196           ierr = ISDuplicate(osm->is[i],&osm->is_local[i]);CHKERRQ(ierr);
197           ierr = ISCopy(osm->is[i],osm->is_local[i]);CHKERRQ(ierr);
198         } else {
199           ierr = PetscObjectReference((PetscObject)osm->is[i]);CHKERRQ(ierr);
200           osm->is_local[i] = osm->is[i];
201         }
202       }
203     }
204     /* Beyond this point osm->is_local is not null. */
205     if (osm->overlap > 0) {
206       /* Extend the "overlapping" regions by a number of steps */
207       ierr = MatIncreaseOverlap(pc->pmat,osm->n,osm->is,osm->overlap);CHKERRQ(ierr);
208     }
209     ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
210     flg  = PETSC_FALSE;
211     ierr = PetscOptionsGetBool(prefix,"-pc_gasm_print_subdomains",&flg,PETSC_NULL);CHKERRQ(ierr);
212     if (flg) { ierr = PCGASMPrintSubdomains(pc);CHKERRQ(ierr); }
213 
214     if (osm->sort_indices) {
215       for (i=0; i<osm->n; i++) {
216         ierr = ISSort(osm->is[i]);CHKERRQ(ierr);
217 	ierr = ISSort(osm->is_local[i]);CHKERRQ(ierr);
218       }
219     }
220     /* Merge the ISs, create merged vectors and scatter contexts. */
221     /* Restriction ISs. */
222     ddn = 0;
223     for (i=0; i<osm->n; i++) {
224       ierr = ISGetLocalSize(osm->is[i],&dn);          CHKERRQ(ierr);
225       ddn += dn;
226     }
227     ierr = PetscMalloc(ddn*sizeof(PetscInt), &ddidx); CHKERRQ(ierr);
228     ddn = 0;
229     for (i=0; i<osm->n; i++) {
230       ierr = ISGetLocalSize(osm->is[i],&dn); CHKERRQ(ierr);
231       ierr = ISGetIndices(osm->is[i],&didx); CHKERRQ(ierr);
232       ierr = PetscMemcpy(ddidx+ddn, didx, sizeof(PetscInt)*dn); CHKERRQ(ierr);
233       ierr = ISRestoreIndices(osm->is[i], &didx); CHKERRQ(ierr);
234       ddn += dn;
235     }
236     ierr = ISCreateGeneral(((PetscObject)(pc))->comm, ddn, ddidx, PETSC_OWN_POINTER, &osm->gis); CHKERRQ(ierr);
237     ierr = MatGetVecs(pc->pmat,&x,&y);CHKERRQ(ierr);
238     ierr = VecCreateMPI(((PetscObject)pc)->comm, ddn, PETSC_DECIDE, &osm->gx); CHKERRQ(ierr);
239     ierr = VecDuplicate(osm->gx,&osm->gy);                                     CHKERRQ(ierr);
240     ierr = VecGetOwnershipRange(osm->gx,&firstRow,&lastRow);CHKERRQ(ierr);    /* gid must hold this process's global indices into gx, which start at its ownership offset */
        ierr = ISCreateStride(((PetscObject)pc)->comm,ddn,firstRow,1, &gid);      CHKERRQ(ierr);
241     ierr = VecScatterCreate(x,osm->gis,osm->gx,gid, &(osm->grestriction));   CHKERRQ(ierr);
242     ierr = ISDestroy(gid);                                                     CHKERRQ(ierr);
243     /* Prolongation ISs */
244     { PetscInt       dn_local;       /* Number of indices in the local part of a single domain assigned to this processor. */
245       const PetscInt *didx_local;    /* Global indices from the local part of a single domain assigned to this processor. */
246       PetscInt       ddn_local;      /* Number of indices in the local part of the disjoint union of all domains assigned to this processor. */
247       PetscInt       *ddidx_local;   /* Global indices of the local part of the disjoint union of all domains assigned to this processor. */
248       /**/
249       ISLocalToGlobalMapping ltog;          /* Map from global to local indices on the disjoint union of subdomains: "local" ind's run from 0 to ddn-1. */
250       PetscInt              *ddidx_llocal;  /* Mapped local indices of the disjoint union of local parts of subdomains. */
251       PetscInt               ddn_llocal;    /* Number of indices in ddidx_llocal; must equal ddn_local, or else gis_local is not a sub-IS of gis. */
252       IS                     gis_llocal;    /* IS with ddidx_llocal indices. */
253       PetscInt               j;
254       ddn_local = 0;
255       for (i=0; i<osm->n; i++) {
256 	ierr = ISGetLocalSize(osm->is_local[i],&dn_local); CHKERRQ(ierr);
257 	ddn_local += dn_local;
258       }
259       ierr = PetscMalloc(ddn_local*sizeof(PetscInt), &ddidx_local); CHKERRQ(ierr);
260       ddn_local = 0;
261       for (i=0; i<osm->n; i++) {
262 	ierr = ISGetLocalSize(osm->is_local[i],&dn_local); CHKERRQ(ierr);
263 	ierr = ISGetIndices(osm->is_local[i],&didx_local); CHKERRQ(ierr);
264 	ierr = PetscMemcpy(ddidx_local+ddn_local, didx_local, sizeof(PetscInt)*dn_local); CHKERRQ(ierr);
265 	ierr = ISRestoreIndices(osm->is_local[i], &didx_local); CHKERRQ(ierr);
266 	ddn_local += dn_local;
267       }
268       ierr = PetscMalloc(sizeof(PetscInt)*ddn_local, &ddidx_llocal); CHKERRQ(ierr);
269       ierr = ISCreateGeneral(((PetscObject)pc)->comm, ddn_local, ddidx_local, PETSC_OWN_POINTER, &(osm->gis_local)); CHKERRQ(ierr);
270       ierr = ISLocalToGlobalMappingCreateIS(osm->gis,&ltog);CHKERRQ(ierr);
271       ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,ddn_local,ddidx_local,&ddn_llocal,ddidx_llocal);CHKERRQ(ierr); /* map ALL local indices, not just those of the last subdomain */
272       ierr = ISLocalToGlobalMappingDestroy(ltog);CHKERRQ(ierr);
273       if (ddn_llocal != ddn_local) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"gis_local contains %D indices outside of gis", ddn_local - ddn_llocal);
274       /* Now convert these localized indices into global indices into the merged output vector. */
275       ierr = VecGetOwnershipRange(osm->gy, &firstRow, &lastRow); CHKERRQ(ierr);
276       for (j=0; j < ddn_llocal; ++j) {
277 	ddidx_llocal[j] += firstRow;
278       }
279       ierr = ISCreateGeneral(PETSC_COMM_SELF,ddn_llocal,ddidx_llocal,PETSC_OWN_POINTER,&gis_llocal); CHKERRQ(ierr);
280       ierr = VecScatterCreate(y,osm->gis_local,osm->gy,gis_llocal,&osm->gprolongation);              CHKERRQ(ierr);
281       ierr = ISDestroy(gis_llocal);CHKERRQ(ierr);
282     }
283     /* Create the subdomain work vectors. */
284     ierr = PetscMalloc(osm->n*sizeof(Vec),&osm->x);CHKERRQ(ierr);
285     ierr = PetscMalloc(osm->n*sizeof(Vec),&osm->y);CHKERRQ(ierr);
286     ierr = VecGetOwnershipRange(osm->gx, &firstRow, &lastRow);CHKERRQ(ierr);
287     ierr = VecGetArray(osm->gx, &gxarray);                                     CHKERRQ(ierr);
288     ierr = VecGetArray(osm->gy, &gyarray);                                     CHKERRQ(ierr);
289     for (i=0, ddn=0; i<osm->n; ++i, ddn += dn) {
290       ierr = ISGetLocalSize(osm->is[i],&dn);CHKERRQ(ierr);
291       ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,dn,gxarray+ddn,&osm->x[i]);CHKERRQ(ierr);
292       ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,dn,gyarray+ddn,&osm->y[i]);CHKERRQ(ierr);
293     }
294     ierr = VecRestoreArray(osm->gx, &gxarray); CHKERRQ(ierr);
295     ierr = VecRestoreArray(osm->gy, &gyarray); CHKERRQ(ierr);
296     ierr = VecDestroy(x); CHKERRQ(ierr);
297     ierr = VecDestroy(y); CHKERRQ(ierr);
298     /* Create the local solvers */
299     ierr = PetscMalloc(osm->n*sizeof(KSP),&osm->ksp);CHKERRQ(ierr);
300     for (i=0; i<osm->n; i++) { /* KSPs are local */
301       ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
302       ierr = PetscLogObjectParent(pc,ksp);CHKERRQ(ierr);
303       ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
304       ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
305       ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
306       ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
307       ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
308       ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
309       osm->ksp[i] = ksp;
310     }
311     scall = MAT_INITIAL_MATRIX;
312 
313   } else {
314     /*
315        Destroy the blocks from the previous iteration
316     */
317     if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
318       ierr = MatDestroyMatrices(osm->n,&osm->pmat);CHKERRQ(ierr);
319       scall = MAT_INITIAL_MATRIX;
320     }
321   }
322 
323   /*
324      Extract out the submatrices.
325   */
326   ierr = MatGetSubMatrices(pc->pmat,osm->n,osm->is, osm->is,scall,&osm->pmat);CHKERRQ(ierr);
327   if (scall == MAT_INITIAL_MATRIX) {
328     ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
329     for (i=0; i<osm->n; i++) {
330       ierr = PetscLogObjectParent(pc,osm->pmat[i]);CHKERRQ(ierr);
331       ierr = PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);CHKERRQ(ierr);
332     }
333   }
334 
335   /* Return control to the user so that the submatrices can be modified (e.g., to apply
336      different boundary conditions for the submatrices than for the global problem) */
337   ierr = PCModifySubMatrices(pc,osm->n,osm->is,osm->is,osm->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);
338 
339   /*
340      Loop over submatrices putting them into local ksp
341   */
342   for (i=0; i<osm->n; i++) {
343     ierr = KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i],pc->flag);CHKERRQ(ierr);
344     if (!pc->setupcalled) {
345       ierr = KSPSetFromOptions(osm->ksp[i]);CHKERRQ(ierr);
346     }
347   }
348 
349   PetscFunctionReturn(0);
350 }
351 
352 #undef __FUNCT__
353 #define __FUNCT__ "PCSetUpOnBlocks_GASM"
354 static PetscErrorCode PCSetUpOnBlocks_GASM(PC pc)
355 {
356   PC_GASM         *osm = (PC_GASM*)pc->data;
357   PetscErrorCode ierr;
358   PetscInt       i;
359 
360   PetscFunctionBegin;
361   for (i=0; i<osm->n; i++) {
362     ierr = KSPSetUp(osm->ksp[i]);CHKERRQ(ierr);
363   }
364   PetscFunctionReturn(0);
365 }
366 
367 #undef __FUNCT__
368 #define __FUNCT__ "PCApply_GASM"
369 static PetscErrorCode PCApply_GASM(PC pc,Vec x,Vec y)
370 {
371   PC_GASM         *osm = (PC_GASM*)pc->data;
372   PetscErrorCode ierr;
373   PetscInt       i;
374   ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;
375 
376   PetscFunctionBegin;
377   /*
378      Support for limiting the restriction or interpolation to only local
379      subdomain values (leaving the other values 0).
380   */
381   if (!(osm->type & PC_GASM_RESTRICT)) {
382     forward = SCATTER_FORWARD_LOCAL;
383     /* have to zero the work RHS since scatter may leave some slots empty */
384     ierr = VecZeroEntries(osm->gx); CHKERRQ(ierr);
385   }
386   if (!(osm->type & PC_GASM_INTERPOLATE)) {
387     reverse = SCATTER_REVERSE_LOCAL;
388   }
389 
390   ierr = VecScatterBegin(osm->grestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
391   ierr = VecZeroEntries(y);CHKERRQ(ierr);
392   ierr = VecScatterEnd(osm->grestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
393   /* do the local solves */
394   for (i=0; i<osm->n; ++i) { /* Note that the solves are local, so we can go to osm->n, rather than osm->nmax. */
395     ierr = KSPSolve(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
396   }
397   ierr = VecScatterBegin(osm->gprolongation,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
398   ierr = VecScatterEnd(osm->gprolongation,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
399   PetscFunctionReturn(0);
400 }
401 
402 #undef __FUNCT__
403 #define __FUNCT__ "PCApplyTranspose_GASM"
404 static PetscErrorCode PCApplyTranspose_GASM(PC pc,Vec x,Vec y)
405 {
406   PC_GASM         *osm = (PC_GASM*)pc->data;
407   PetscErrorCode ierr;
408   PetscInt       i;
409   ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;
410 
411   PetscFunctionBegin;
412   /*
413      Support for limiting the restriction or interpolation to only local
414      subdomain values (leaving the other values 0).
415 
416      Note: these are reversed from the PCApply_GASM() because we are applying the
417      transpose of the three terms
418   */
419   if (!(osm->type & PC_GASM_INTERPOLATE)) {
420     forward = SCATTER_FORWARD_LOCAL;
421     /* have to zero the work RHS since scatter may leave some slots empty */
422     ierr = VecZeroEntries(osm->gx);CHKERRQ(ierr);
423   }
424   if (!(osm->type & PC_GASM_RESTRICT)) {
425     reverse = SCATTER_REVERSE_LOCAL;
426   }
427 
428   ierr = VecScatterBegin(osm->grestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
429   ierr = VecZeroEntries(y);CHKERRQ(ierr);
430   ierr = VecScatterEnd(osm->grestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
431   /* do the local solves */
432   for (i=0; i<osm->n; ++i) { /* Note that the solves are local, so we can go to osm->n, rather than osm->nmax. */
433     ierr = KSPSolveTranspose(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
434   }
435   ierr = VecScatterBegin(osm->gprolongation,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
436   ierr = VecScatterEnd(osm->gprolongation,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
437   PetscFunctionReturn(0);
438 }
439 
440 #undef __FUNCT__
441 #define __FUNCT__ "PCDestroy_GASM"
442 static PetscErrorCode PCDestroy_GASM(PC pc)
443 {
444   PC_GASM         *osm = (PC_GASM*)pc->data;
445   PetscErrorCode ierr;
446   PetscInt       i;
447 
448   PetscFunctionBegin;
449   if (osm->ksp) {
450     for (i=0; i<osm->n; i++) {
451       ierr = KSPDestroy(osm->ksp[i]);CHKERRQ(ierr);
452     }
453     ierr = PetscFree(osm->ksp);CHKERRQ(ierr);
454   }
455   if (osm->pmat) {
456     if (osm->n > 0) {
457       ierr = MatDestroyMatrices(osm->n,&osm->pmat);CHKERRQ(ierr);
458     }
459   }
460   if (osm->x) { /* the work vectors, scatters, and merged ISs exist only if PCSetUp_GASM() has run */
461     for (i=0; i<osm->n; i++) {
462       ierr = VecDestroy(osm->x[i]);CHKERRQ(ierr);
463       ierr = VecDestroy(osm->y[i]);CHKERRQ(ierr);
464     }
465     ierr = PetscFree(osm->x);CHKERRQ(ierr);
        ierr = PetscFree(osm->y);CHKERRQ(ierr);
      }
466   if (osm->gx) {ierr = VecDestroy(osm->gx);CHKERRQ(ierr);}
467   if (osm->gy) {ierr = VecDestroy(osm->gy);CHKERRQ(ierr);}
468 
469   if (osm->grestriction)  {ierr = VecScatterDestroy(osm->grestriction);CHKERRQ(ierr);}
470   if (osm->gprolongation) {ierr = VecScatterDestroy(osm->gprolongation);CHKERRQ(ierr);}
      if (osm->gis)           {ierr = ISDestroy(osm->gis);CHKERRQ(ierr);}       /* also free the merged index sets created in PCSetUp_GASM() */
      if (osm->gis_local)     {ierr = ISDestroy(osm->gis_local);CHKERRQ(ierr);}
471 
472   if (osm->is) {
473     ierr = PCGASMDestroySubdomains(osm->n,osm->is,osm->is_local);CHKERRQ(ierr);
474   }
475   ierr = PetscFree(osm);CHKERRQ(ierr);
476   PetscFunctionReturn(0);
477 }
478 
479 #undef __FUNCT__
480 #define __FUNCT__ "PCSetFromOptions_GASM"
481 static PetscErrorCode PCSetFromOptions_GASM(PC pc)
{
482   PC_GASM         *osm = (PC_GASM*)pc->data;
483   PetscErrorCode ierr;
484   PetscInt       blocks,ovl;
485   PetscBool      symset,flg;
486   PCGASMType      gasmtype;
487 
488   PetscFunctionBegin;
489   /* set the type to symmetric if matrix is symmetric */
490   if (!osm->type_set && pc->pmat) {
491     ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
492     if (symset && flg) { osm->type = PC_GASM_BASIC; }
493   }
494   ierr = PetscOptionsHead("Generalized additive Schwarz options");CHKERRQ(ierr);
495     ierr = PetscOptionsInt("-pc_gasm_blocks","Number of subdomains","PCGASMSetTotalSubdomains",osm->n,&blocks,&flg);CHKERRQ(ierr);
496     if (flg) {ierr = PCGASMSetTotalSubdomains(pc,blocks);CHKERRQ(ierr); }
497     ierr = PetscOptionsInt("-pc_gasm_overlap","Number of grid points overlap","PCGASMSetOverlap",osm->overlap,&ovl,&flg);CHKERRQ(ierr);
498     if (flg) {ierr = PCGASMSetOverlap(pc,ovl);CHKERRQ(ierr); }
499     flg  = PETSC_FALSE;
500     ierr = PetscOptionsEnum("-pc_gasm_type","Type of restriction/extension","PCGASMSetType",PCGASMTypes,(PetscEnum)osm->type,(PetscEnum*)&gasmtype,&flg);CHKERRQ(ierr);
501     if (flg) {ierr = PCGASMSetType(pc,gasmtype);CHKERRQ(ierr); }
502   ierr = PetscOptionsTail();CHKERRQ(ierr);
503   PetscFunctionReturn(0);
504 }
505 
506 /*------------------------------------------------------------------------------------*/
507 
508 EXTERN_C_BEGIN
509 #undef __FUNCT__
510 #define __FUNCT__ "PCGASMSetLocalSubdomains_GASM"
511 PetscErrorCode PETSCKSP_DLLEXPORT PCGASMSetLocalSubdomains_GASM(PC pc,PetscInt n,IS is[],IS is_local[])
512 {
513   PC_GASM         *osm = (PC_GASM*)pc->data;
514   PetscErrorCode ierr;
515   PetscInt       i;
516 
517   PetscFunctionBegin;
518   if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 1 or more blocks, n = %D",n);
519   if (pc->setupcalled && (n != osm->n || is)) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"PCGASMSetLocalSubdomains() should be called before calling PCSetUp().");
520 
521   if (!pc->setupcalled) {
        /* Reference the new ISs and destroy any previously set subdomains before resetting osm->is;
           zeroing osm->is first would leak the old index sets. */
522     if (is) {
523       for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is[i]);CHKERRQ(ierr);}
524     }
525     if (is_local) {
526       for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is_local[i]);CHKERRQ(ierr);}
527     }
528     if (osm->is) {
529       ierr = PCGASMDestroySubdomains(osm->n,osm->is,osm->is_local);CHKERRQ(ierr);
530     }
531     osm->n            = n;
532     osm->is           = 0;
533     osm->is_local     = 0;
534     if (is) {
535       ierr = PetscMalloc(n*sizeof(IS),&osm->is);CHKERRQ(ierr);
536       for (i=0; i<n; i++) { osm->is[i] = is[i]; }
537       /* Flag indicating that the user has set overlapping subdomains so PCGASM should not increase their size. */
538       osm->overlap = -1;
539     }
540     if (is_local) {
541       ierr = PetscMalloc(n*sizeof(IS),&osm->is_local);CHKERRQ(ierr);
542       for (i=0; i<n; i++) { osm->is_local[i] = is_local[i]; }
543     }
544   }
545   PetscFunctionReturn(0);
546 }
547 EXTERN_C_END
548 
549 EXTERN_C_BEGIN
550 #undef __FUNCT__
551 #define __FUNCT__ "PCGASMSetTotalSubdomains_GASM"
552 PetscErrorCode PETSCKSP_DLLEXPORT PCGASMSetTotalSubdomains_GASM(PC pc,PetscInt N)
{
553   PC_GASM         *osm = (PC_GASM*)pc->data;
554   PetscErrorCode ierr;
555   PetscMPIInt    rank,size;
556   PetscInt       n;
557 
558   PetscFunctionBegin;
559   if (N < 1) SETERRQ1(((PetscObject)pc)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Number of total blocks must be > 0, N = %D",N);
560 
561   /*
562      Split the subdomains equally among all processors
563   */
564   ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
565   ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
566   n = N/size + ((N % size) > rank);
567   if (!n) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Process %d must have at least one block: total processors %d total blocks %D",(int)rank,(int)size,N);
568   if (pc->setupcalled && n != osm->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"PCGASMSetTotalSubdomains() should be called before PCSetUp().");
569   if (!pc->setupcalled) {
570     if (osm->is) {
571       ierr = PCGASMDestroySubdomains(osm->n,osm->is,osm->is_local);CHKERRQ(ierr);
572     }
573     osm->N            = N;
574     osm->n            = n;
575     osm->is           = 0;
576     osm->is_local     = 0;
577   }
578   PetscFunctionReturn(0);
579 }/* PCGASMSetTotalSubdomains_GASM() */
580 EXTERN_C_END
581 
582 EXTERN_C_BEGIN
583 #undef __FUNCT__
584 #define __FUNCT__ "PCGASMSetOverlap_GASM"
585 PetscErrorCode PETSCKSP_DLLEXPORT PCGASMSetOverlap_GASM(PC pc,PetscInt ovl)
586 {
587   PC_GASM *osm = (PC_GASM*)pc->data;
588 
589   PetscFunctionBegin;
590   if (ovl < 0) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
591   if (pc->setupcalled && ovl != osm->overlap) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"PCGASMSetOverlap() should be called before PCSetUp().");
592   if (!pc->setupcalled) {
593     osm->overlap = ovl;
594   }
595   PetscFunctionReturn(0);
596 }
597 EXTERN_C_END
598 
599 EXTERN_C_BEGIN
600 #undef __FUNCT__
601 #define __FUNCT__ "PCGASMSetType_GASM"
602 PetscErrorCode PETSCKSP_DLLEXPORT PCGASMSetType_GASM(PC pc,PCGASMType type)
603 {
604   PC_GASM *osm = (PC_GASM*)pc->data;
605 
606   PetscFunctionBegin;
607   osm->type     = type;
608   osm->type_set = PETSC_TRUE;
609   PetscFunctionReturn(0);
610 }
611 EXTERN_C_END
612 
613 EXTERN_C_BEGIN
614 #undef __FUNCT__
615 #define __FUNCT__ "PCGASMSetSortIndices_GASM"
616 PetscErrorCode PETSCKSP_DLLEXPORT PCGASMSetSortIndices_GASM(PC pc,PetscBool  doSort)
617 {
618   PC_GASM *osm = (PC_GASM*)pc->data;
619 
620   PetscFunctionBegin;
621   osm->sort_indices = doSort;
622   PetscFunctionReturn(0);
623 }
624 EXTERN_C_END
625 
626 EXTERN_C_BEGIN
627 #undef __FUNCT__
628 #define __FUNCT__ "PCGASMGetSubKSP_GASM"
629 /*
630    FIX: This routine might need to be modified once multiple ranks per subdomain are allowed.
631         In particular, it would upset the global subdomain number calculation.
632 */
633 PetscErrorCode PETSCKSP_DLLEXPORT PCGASMGetSubKSP_GASM(PC pc,PetscInt *n,PetscInt *first,KSP **ksp)
634 {
635   PC_GASM         *osm = (PC_GASM*)pc->data;
636   PetscErrorCode ierr;
637 
638   PetscFunctionBegin;
639   if (osm->n < 1) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ORDER,"Need to call PCSetUp() on PC (or KSPSetUp() on the outer KSP object) before calling here");
640 
641   if (n) {
642     *n = osm->n;
643   }
644   if (first) {
645     ierr = MPI_Scan(&osm->n,first,1,MPIU_INT,MPI_SUM,((PetscObject)pc)->comm);CHKERRQ(ierr);
646     *first -= osm->n;
647   }
648   if (ksp) {
649     /* Assume that local solves are now different; not necessarily
650        true though!  This flag is used only for PCView_GASM() */
651     *ksp                   = osm->ksp;
652     osm->same_local_solves = PETSC_FALSE;
653   }
654   PetscFunctionReturn(0);
655 }/* PCGASMGetSubKSP_GASM() */
656 EXTERN_C_END
657 
658 
659 #undef __FUNCT__
660 #define __FUNCT__ "PCGASMSetLocalSubdomains"
661 /*@C
662     PCGASMSetLocalSubdomains - Sets the local subdomains (for this processor
663     only) for the additive Schwarz preconditioner.
664 
665     Collective on PC
666 
667     Input Parameters:
668 +   pc - the preconditioner context
669 .   n - the number of subdomains for this processor (default value = 1)
670 .   is - the index sets that define the subdomains for this processor
671          (or PETSC_NULL for PETSc to determine subdomains)
672 -   is_local - the index sets that define the local (nonoverlapping) part of each subdomain for this processor
673          (or PETSC_NULL to have PETSc derive them from is)
674 
675     Notes:
676     The IS numbering is in the parallel, global numbering of the vector.
677 
678     By default the GASM preconditioner uses 1 block per processor.
679 
680     Use PCGASMSetTotalSubdomains() to set the subdomains for all processors.
681 
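    Example:
    A minimal usage sketch, assuming two index sets is0 and is1 (in the global
    numbering of the vector) have already been created by the caller:
.vb
      IS is[2];
      is[0] = is0; is[1] = is1;
      ierr = PCSetType(pc,PCGASM);CHKERRQ(ierr);
      ierr = PCGASMSetLocalSubdomains(pc,2,is,PETSC_NULL);CHKERRQ(ierr);
.ve
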
682     Level: advanced
683 
684 .keywords: PC, GASM, set, local, subdomains, additive Schwarz
685 
686 .seealso: PCGASMSetTotalSubdomains(), PCGASMSetOverlap(), PCGASMGetSubKSP(),
687           PCGASMCreateSubdomains2D(), PCGASMGetLocalSubdomains()
688 @*/
689 PetscErrorCode PETSCKSP_DLLEXPORT PCGASMSetLocalSubdomains(PC pc,PetscInt n,IS is[],IS is_local[])
690 {
691   PetscErrorCode ierr;
692 
693   PetscFunctionBegin;
694   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
695   ierr = PetscTryMethod(pc,"PCGASMSetLocalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,n,is,is_local));CHKERRQ(ierr);
696   PetscFunctionReturn(0);
697 }
698 
699 #undef __FUNCT__
700 #define __FUNCT__ "PCGASMSetTotalSubdomains"
701 /*@C
702     PCGASMSetTotalSubdomains - Sets the total number of subdomains to use across all
703     processors for the additive Schwarz preconditioner.  Either all or no processors in the
704     PC communicator must call this routine, with the same value of N.
705 
706     Collective on PC
707 
708     Input Parameters:
709 +   pc - the preconditioner context
710 -   N - the total number of subdomains for all processors
715 
716     Options Database Key:
719 .    -pc_gasm_blocks <blks> - Sets the total number of subdomain blocks
720 
721     Notes:
722     This routine sets only the total number of subdomain blocks; the blocks themselves
723     are determined by PETSc and split as evenly as possible among the processors.
724 
725     By default the GASM preconditioner uses 1 block per processor.
728 
729     Use PCGASMSetLocalSubdomains() to set local subdomains.
730 
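    Example:
    A minimal usage sketch; 16 subdomains are split as evenly as possible among
    the processors in the communicator of pc:
.vb
      ierr = PCSetType(pc,PCGASM);CHKERRQ(ierr);
      ierr = PCGASMSetTotalSubdomains(pc,16);CHKERRQ(ierr);
.ve
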
731     Level: advanced
732 
733 .keywords: PC, GASM, set, total, global, subdomains, additive Schwarz
734 
735 .seealso: PCGASMSetLocalSubdomains(), PCGASMSetOverlap(), PCGASMGetSubKSP(),
736           PCGASMCreateSubdomains2D()
737 @*/
738 PetscErrorCode PETSCKSP_DLLEXPORT PCGASMSetTotalSubdomains(PC pc,PetscInt N)
739 {
740   PetscErrorCode ierr;
741 
742   PetscFunctionBegin;
743   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
744   ierr = PetscTryMethod(pc,"PCGASMSetTotalSubdomains_C",(PC,PetscInt),(pc,N));CHKERRQ(ierr);
745   PetscFunctionReturn(0);
746 }
747 
748 #undef __FUNCT__
749 #define __FUNCT__ "PCGASMSetOverlap"
750 /*@
751     PCGASMSetOverlap - Sets the overlap between a pair of subdomains for the
752     additive Schwarz preconditioner.  Either all or no processors in the
753     PC communicator must call this routine.
754 
755     Logically Collective on PC
756 
757     Input Parameters:
758 +   pc  - the preconditioner context
759 -   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)
760 
761     Options Database Key:
762 .   -pc_gasm_overlap <ovl> - Sets overlap
763 
764     Notes:
765     By default the GASM preconditioner uses 1 block per processor.  To use
766     multiple blocks per processor, see PCGASMSetTotalSubdomains() and
767     PCGASMSetLocalSubdomains() (and the option -pc_gasm_blocks <blks>).
768 
769     The overlap defaults to 1, so if one desires that no additional
770     overlap be computed beyond what may have been set with a call to
771     PCGASMSetTotalSubdomains() or PCGASMSetLocalSubdomains(), then ovl
772     must be set to 0.  In particular, if one does not explicitly set
773     the subdomains in application code, then all overlap would be computed
774     internally by PETSc, and using an overlap of 0 would result in a GASM
775     variant that is equivalent to the block Jacobi preconditioner.
776 
777     Note that one can define initial index sets with any overlap via
778     PCGASMSetTotalSubdomains() or PCGASMSetLocalSubdomains(); the routine
779     PCGASMSetOverlap() merely allows PETSc to extend that overlap further
780     if desired.
781 
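    Example:
    A minimal usage sketch requesting two layers of overlap (equivalent to the
    option -pc_gasm_overlap 2):
.vb
      ierr = PCGASMSetOverlap(pc,2);CHKERRQ(ierr);
.ve
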
782     Level: intermediate
783 
784 .keywords: PC, GASM, set, overlap
785 
786 .seealso: PCGASMSetTotalSubdomains(), PCGASMSetLocalSubdomains(), PCGASMGetSubKSP(),
787           PCGASMCreateSubdomains2D(), PCGASMGetLocalSubdomains()
788 @*/
789 PetscErrorCode PETSCKSP_DLLEXPORT PCGASMSetOverlap(PC pc,PetscInt ovl)
790 {
791   PetscErrorCode ierr;
792 
793   PetscFunctionBegin;
794   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
795   PetscValidLogicalCollectiveInt(pc,ovl,2);
796   ierr = PetscTryMethod(pc,"PCGASMSetOverlap_C",(PC,PetscInt),(pc,ovl));CHKERRQ(ierr);
797   PetscFunctionReturn(0);
798 }
799 
800 #undef __FUNCT__
801 #define __FUNCT__ "PCGASMSetType"
802 /*@
803     PCGASMSetType - Sets the type of restriction and interpolation used
804     for local problems in the additive Schwarz method.
805 
806     Logically Collective on PC
807 
808     Input Parameters:
809 +   pc  - the preconditioner context
810 -   type - variant of GASM, one of
811 .vb
812       PC_GASM_BASIC       - full interpolation and restriction
813       PC_GASM_RESTRICT    - full restriction, local processor interpolation
814       PC_GASM_INTERPOLATE - full interpolation, local processor restriction
815       PC_GASM_NONE        - local processor restriction and interpolation
816 .ve
817 
818     Options Database Key:
819 .   -pc_gasm_type [basic,restrict,interpolate,none] - Sets GASM type
820 
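    Example:
    A minimal usage sketch selecting full interpolation and restriction
    (equivalent to the option -pc_gasm_type basic):
.vb
      ierr = PCGASMSetType(pc,PC_GASM_BASIC);CHKERRQ(ierr);
.ve
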
821     Level: intermediate
822 
823 .keywords: PC, GASM, set, type
824 
825 .seealso: PCGASMSetTotalSubdomains(), PCGASMSetLocalSubdomains(), PCGASMGetSubKSP(),
826           PCGASMCreateSubdomains2D()
827 @*/
828 PetscErrorCode PETSCKSP_DLLEXPORT PCGASMSetType(PC pc,PCGASMType type)
829 {
830   PetscErrorCode ierr;
831 
832   PetscFunctionBegin;
833   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
834   PetscValidLogicalCollectiveEnum(pc,type,2);
835   ierr = PetscTryMethod(pc,"PCGASMSetType_C",(PC,PCGASMType),(pc,type));CHKERRQ(ierr);
836   PetscFunctionReturn(0);
837 }
838 
839 #undef __FUNCT__
840 #define __FUNCT__ "PCGASMSetSortIndices"
841 /*@
842     PCGASMSetSortIndices - Determines whether subdomain indices are sorted.
843 
844     Logically Collective on PC
845 
846     Input Parameters:
847 +   pc  - the preconditioner context
848 -   doSort - sort the subdomain indices
849 
850     Level: intermediate
851 
852 .keywords: PC, GASM, set, type
853 
854 .seealso: PCGASMSetLocalSubdomains(), PCGASMSetTotalSubdomains(), PCGASMGetSubKSP(),
855           PCGASMCreateSubdomains2D()
856 @*/
857 PetscErrorCode PETSCKSP_DLLEXPORT PCGASMSetSortIndices(PC pc,PetscBool  doSort)
858 {
859   PetscErrorCode ierr;
860 
861   PetscFunctionBegin;
862   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
863   PetscValidLogicalCollectiveBool(pc,doSort,2);
864   ierr = PetscTryMethod(pc,"PCGASMSetSortIndices_C",(PC,PetscBool),(pc,doSort));CHKERRQ(ierr);
865   PetscFunctionReturn(0);
866 }
867 
868 #undef __FUNCT__
869 #define __FUNCT__ "PCGASMGetSubKSP"
870 /*@C
871    PCGASMGetSubKSP - Gets the local KSP contexts for all blocks on
872    this processor.
873 
874    Collective on PC iff first_local is requested
875 
876    Input Parameter:
877 .  pc - the preconditioner context
878 
879    Output Parameters:
880 +  n_local - the number of blocks on this processor or PETSC_NULL
881 .  first_local - the global number of the first block on this processor or PETSC_NULL,
882                  all processors must request or all must pass PETSC_NULL
883 -  ksp - the array of KSP contexts
884 
885    Note:
886    After PCGASMGetSubKSP() the array of KSP contexts is not to be freed.
887 
888    Currently for some matrix implementations only 1 block per processor
889    is supported.
890 
891    You must call KSPSetUp() before calling PCGASMGetSubKSP().
892 
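   Example:
   A minimal usage sketch that tightens the relative tolerance of every local
   block solver (assumes the outer KSP has already been set up, and that no
   process requests first_local):
.vb
      KSP      *subksp;
      PetscInt i,nlocal;
      ierr = PCGASMGetSubKSP(pc,&nlocal,PETSC_NULL,&subksp);CHKERRQ(ierr);
      for (i=0; i<nlocal; i++) {
        ierr = KSPSetTolerances(subksp[i],1.e-8,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT);CHKERRQ(ierr);
      }
.ve
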
893    Level: advanced
894 
895 .keywords: PC, GASM, additive Schwarz, get, sub, KSP, context
896 
897 .seealso: PCGASMSetTotalSubdomains(), PCGASMSetLocalSubdomains(), PCGASMSetOverlap(),
898           PCGASMCreateSubdomains2D()
899 @*/
900 PetscErrorCode PETSCKSP_DLLEXPORT PCGASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
901 {
902   PetscErrorCode ierr;
903 
904   PetscFunctionBegin;
905   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
906   ierr = PetscUseMethod(pc,"PCGASMGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));CHKERRQ(ierr);
907   PetscFunctionReturn(0);
908 }
909 
910 /* -------------------------------------------------------------------------------------*/
911 /*MC
912    PCGASM - Use the (restricted) additive Schwarz method, in which each block is (approximately) solved with
913            its own KSP object.
914 
915    Options Database Keys:
916 +  -pc_gasm_blocks <blks> - Sets total blocks
918 .  -pc_gasm_overlap <ovl> - Sets overlap
919 -  -pc_gasm_type [basic,restrict,interpolate,none] - Sets GASM type
920 
921      IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
922       will get different convergence rates due to the default option of -pc_gasm_type restrict. Use
923       -pc_gasm_type basic to get the standard GASM.
924 
925    Notes: Each processor can have one or more blocks, but a block cannot be shared by more
926      than one processor. Defaults to one block per processor.
927 
928      To set options on the solvers for each block, append -sub_ to all the KSP and PC
929         options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
930 
931      To set the options on the solvers separately for each block, call PCGASMGetSubKSP()
932          and set the options directly on the resulting KSP object (you can access its PC
933          with KSPGetPC())
934 
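     For example, to solve with 8 total subdomain blocks, overlap 2, and ILU(1) as the
     local solver on each block, one could run with the options (a typical sketch):
.vb
       -pc_type gasm -pc_gasm_blocks 8 -pc_gasm_overlap 2 -sub_ksp_type preonly -sub_pc_type ilu -sub_pc_factor_levels 1
.ve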
935 
936    Level: beginner
937 
938    Concepts: additive Schwarz method
939 
940     References:
941     An additive variant of the Schwarz alternating method for the case of many subregions
942     M Dryja, OB Widlund - Courant Institute, New York University Technical report
943 
944     Domain Decompositions: Parallel Multilevel Methods for Elliptic Partial Differential Equations,
945     Barry Smith, Petter Bjorstad, and William Gropp, Cambridge University Press, ISBN 0-521-49589-X.
946 
947 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
948            PCBJACOBI, PCGASMGetSubKSP(), PCGASMSetLocalSubdomains(),
949            PCGASMSetTotalSubdomains(), PCSetModifySubmatrices(), PCGASMSetOverlap(), PCGASMSetType()
950 
951 M*/
952 
953 EXTERN_C_BEGIN
954 #undef __FUNCT__
955 #define __FUNCT__ "PCCreate_GASM"
956 PetscErrorCode PETSCKSP_DLLEXPORT PCCreate_GASM(PC pc)
957 {
958   PetscErrorCode ierr;
959   PC_GASM         *osm;
960 
961   PetscFunctionBegin;
962   ierr = PetscNewLog(pc,PC_GASM,&osm);CHKERRQ(ierr);
963   osm->N                 = PETSC_DECIDE;
964   osm->n                 = 0;
965   osm->nmax              = 0;
966   osm->overlap           = 1;
967   osm->ksp               = 0;
968   osm->grestriction      = 0;
969   osm->gprolongation     = 0;
  osm->gis               = 0;
  osm->gis_local         = 0;
970   osm->gx                = 0;
971   osm->gy                = 0;
972   osm->x                 = 0;
973   osm->y                 = 0;
974   osm->is                = 0;
975   osm->is_local          = 0;
976   osm->pmat              = 0;
977   osm->type              = PC_GASM_RESTRICT;
978   osm->same_local_solves = PETSC_TRUE;
979   osm->sort_indices      = PETSC_TRUE;
980 
981   pc->data                   = (void*)osm;
982   pc->ops->apply             = PCApply_GASM;
983   pc->ops->applytranspose    = PCApplyTranspose_GASM;
984   pc->ops->setup             = PCSetUp_GASM;
985   pc->ops->destroy           = PCDestroy_GASM;
986   pc->ops->setfromoptions    = PCSetFromOptions_GASM;
987   pc->ops->setuponblocks     = PCSetUpOnBlocks_GASM;
988   pc->ops->view              = PCView_GASM;
989   pc->ops->applyrichardson   = 0;
990 
991   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMSetLocalSubdomains_C","PCGASMSetLocalSubdomains_GASM",
992                     PCGASMSetLocalSubdomains_GASM);CHKERRQ(ierr);
993   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMSetTotalSubdomains_C","PCGASMSetTotalSubdomains_GASM",
994                     PCGASMSetTotalSubdomains_GASM);CHKERRQ(ierr);
995   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMSetOverlap_C","PCGASMSetOverlap_GASM",
996                     PCGASMSetOverlap_GASM);CHKERRQ(ierr);
997   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMSetType_C","PCGASMSetType_GASM",
998                     PCGASMSetType_GASM);CHKERRQ(ierr);
999   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMSetSortIndices_C","PCGASMSetSortIndices_GASM",
1000                     PCGASMSetSortIndices_GASM);CHKERRQ(ierr);
1001   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMGetSubKSP_C","PCGASMGetSubKSP_GASM",
1002                     PCGASMGetSubKSP_GASM);CHKERRQ(ierr);
1003   PetscFunctionReturn(0);
1004 }
1005 EXTERN_C_END
1006 
1007 
1008 #undef __FUNCT__
1009 #define __FUNCT__ "PCGASMCreateSubdomains"
1010 /*@C
1011    PCGASMCreateSubdomains - Creates the index sets for the overlapping Schwarz
1012    preconditioner for any problem on a general grid.
1013 
1014    Collective
1015 
1016    Input Parameters:
1017 +  A - The global matrix operator
1018 -  n - the number of local blocks
1019 
1020    Output Parameters:
1021 .  outis - the array of index sets defining the subdomains
1022 
1023    Level: advanced
1024 
1025    Note: this generates nonoverlapping subdomains; the PCGASM preconditioner will generate the
1026     overlap from these if you use PCGASMSetLocalSubdomains() and request a nonzero overlap with PCGASMSetOverlap()
1027 
1028     In the Fortran version you must provide the array outis[] already allocated of length n.
1029 
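   Example:
   A minimal usage sketch creating two nonoverlapping subdomains on this processor
   and handing them to a PC of type PCGASM (pc and its matrix A are assumed to exist
   already); the ISs are later freed with PCGASMDestroySubdomains():
.vb
      IS *is;
      ierr = PCGASMCreateSubdomains(A,2,&is);CHKERRQ(ierr);
      ierr = PCGASMSetLocalSubdomains(pc,2,is,PETSC_NULL);CHKERRQ(ierr);
.ve
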
1030 .keywords: PC, GASM, additive Schwarz, create, subdomains, unstructured grid
1031 
1032 .seealso: PCGASMSetLocalSubdomains(), PCGASMDestroySubdomains()
1033 @*/
1034 PetscErrorCode PETSCKSP_DLLEXPORT PCGASMCreateSubdomains(Mat A, PetscInt n, IS* outis[])
1035 {
1036   MatPartitioning           mpart;
1037   const char                *prefix;
1038   PetscErrorCode            (*f)(Mat,PetscBool *,MatReuse,Mat*);
1039   PetscMPIInt               size;
1040   PetscInt                  i,j,rstart,rend,bs;
1041   PetscBool                 iscopy = PETSC_FALSE,isbaij = PETSC_FALSE,foundpart = PETSC_FALSE;
1042   Mat                       Ad = PETSC_NULL, adj;
1043   IS                        ispart,isnumb,*is;
1044   PetscErrorCode            ierr;
1045 
1046   PetscFunctionBegin;
1047   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
1048   PetscValidPointer(outis,3);
1049   if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of local blocks must be > 0, n = %D",n);
1050 
1051   /* Get prefix, row distribution, and block size */
1052   ierr = MatGetOptionsPrefix(A,&prefix);CHKERRQ(ierr);
1053   ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
1054   ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
1055   if (rstart/bs*bs != rstart || rend/bs*bs != rend) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"bad row distribution [%D,%D) for matrix block size %D",rstart,rend,bs);
1056 
1057   /* Get diagonal block from matrix if possible */
1058   ierr = MPI_Comm_size(((PetscObject)A)->comm,&size);CHKERRQ(ierr);
1059   ierr = PetscObjectQueryFunction((PetscObject)A,"MatGetDiagonalBlock_C",(void (**)(void))&f);CHKERRQ(ierr);
1060   if (f) {
1061     ierr = (*f)(A,&iscopy,MAT_INITIAL_MATRIX,&Ad);CHKERRQ(ierr);
1062   } else if (size == 1) {
1063     iscopy = PETSC_FALSE; Ad = A;
1064   } else {
1065     iscopy = PETSC_FALSE; Ad = PETSC_NULL;
1066   }
1067   if (Ad) {
1068     ierr = PetscTypeCompare((PetscObject)Ad,MATSEQBAIJ,&isbaij);CHKERRQ(ierr);
1069     if (!isbaij) {ierr = PetscTypeCompare((PetscObject)Ad,MATSEQSBAIJ,&isbaij);CHKERRQ(ierr);}
1070   }
1071   if (Ad && n > 1) {
1072     PetscBool  match,done;
1073     /* Try to setup a good matrix partitioning if available */
1074     ierr = MatPartitioningCreate(PETSC_COMM_SELF,&mpart);CHKERRQ(ierr);
1075     ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
1076     ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr);
1077     ierr = PetscTypeCompare((PetscObject)mpart,MATPARTITIONINGCURRENT,&match);CHKERRQ(ierr);
1078     if (!match) {
1079       ierr = PetscTypeCompare((PetscObject)mpart,MATPARTITIONINGSQUARE,&match);CHKERRQ(ierr);
1080     }
1081     if (!match) { /* assume a "good" partitioner is available */
1082       PetscInt na,*ia,*ja;
1083       ierr = MatGetRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
1084       if (done) {
1085 	/* Build adjacency matrix by hand. Unfortunately a call to
1086 	   MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
1087 	   remove the block-aij structure and we cannot expect
1088 	   MatPartitioning to split vertices as we need */
1089 	PetscInt i,j,*row,len,nnz,cnt,*iia=0,*jja=0;
1090 	nnz = 0;
1091 	for (i=0; i<na; i++) { /* count number of nonzeros */
1092 	  len = ia[i+1] - ia[i];
1093 	  row = ja + ia[i];
1094 	  for (j=0; j<len; j++) {
1095 	    if (row[j] == i) { /* don't count diagonal */
1096 	      len--; break;
1097 	    }
1098 	  }
1099 	  nnz += len;
1100 	}
1101 	ierr = PetscMalloc((na+1)*sizeof(PetscInt),&iia);CHKERRQ(ierr);
1102 	ierr = PetscMalloc((nnz)*sizeof(PetscInt),&jja);CHKERRQ(ierr);
1103 	nnz    = 0;
1104 	iia[0] = 0;
1105 	for (i=0; i<na; i++) { /* fill adjacency */
1106 	  cnt = 0;
1107 	  len = ia[i+1] - ia[i];
1108 	  row = ja + ia[i];
1109 	  for (j=0; j<len; j++) {
1110 	    if (row[j] != i) { /* if not diagonal */
1111 	      jja[nnz+cnt++] = row[j];
1112 	    }
1113 	  }
1114 	  nnz += cnt;
1115 	  iia[i+1] = nnz;
1116 	}
1117 	/* Partitioning of the adjacency matrix */
1118 	ierr = MatCreateMPIAdj(PETSC_COMM_SELF,na,na,iia,jja,PETSC_NULL,&adj);CHKERRQ(ierr);
1119 	ierr = MatPartitioningSetAdjacency(mpart,adj);CHKERRQ(ierr);
1120 	ierr = MatPartitioningSetNParts(mpart,n);CHKERRQ(ierr);
1121 	ierr = MatPartitioningApply(mpart,&ispart);CHKERRQ(ierr);
1122 	ierr = ISPartitioningToNumbering(ispart,&isnumb);CHKERRQ(ierr);
1123 	ierr = MatDestroy(adj);CHKERRQ(ierr);
1124 	foundpart = PETSC_TRUE;
1125       }
1126       ierr = MatRestoreRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
1127     }
1128     ierr = MatPartitioningDestroy(mpart);CHKERRQ(ierr);
1129   }
1130   if (iscopy) {ierr = MatDestroy(Ad);CHKERRQ(ierr);}
1131 
1132   ierr = PetscMalloc(n*sizeof(IS),&is);CHKERRQ(ierr);
1133   *outis = is;
1134 
1135   if (!foundpart) {
1136 
1137     /* Partitioning by contiguous chunks of rows */
1138 
1139     PetscInt mbs   = (rend-rstart)/bs;
1140     PetscInt start = rstart;
1141     for (i=0; i<n; i++) {
1142       PetscInt count = (mbs/n + ((mbs % n) > i)) * bs;
1143       ierr   = ISCreateStride(PETSC_COMM_SELF,count,start,1,&is[i]);CHKERRQ(ierr);
1144       start += count;
1145     }
1146 
1147   } else {
1148 
1149     /* Partitioning by adjacency of diagonal block  */
1150 
1151     const PetscInt *numbering;
1152     PetscInt       *count,nidx,*indices,*newidx,start=0;
1153     /* Get node count in each partition */
1154     ierr = PetscMalloc(n*sizeof(PetscInt),&count);CHKERRQ(ierr);
1155     ierr = ISPartitioningCount(ispart,n,count);CHKERRQ(ierr);
1156     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1157       for (i=0; i<n; i++) count[i] *= bs;
1158     }
1159     /* Build indices from node numbering */
1160     ierr = ISGetLocalSize(isnumb,&nidx);CHKERRQ(ierr);
1161     ierr = PetscMalloc(nidx*sizeof(PetscInt),&indices);CHKERRQ(ierr);
1162     for (i=0; i<nidx; i++) indices[i] = i; /* needs to be initialized */
1163     ierr = ISGetIndices(isnumb,&numbering);CHKERRQ(ierr);
1164     ierr = PetscSortIntWithPermutation(nidx,numbering,indices);CHKERRQ(ierr);
1165     ierr = ISRestoreIndices(isnumb,&numbering);CHKERRQ(ierr);
1166     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1167       ierr = PetscMalloc(nidx*bs*sizeof(PetscInt),&newidx);CHKERRQ(ierr);
1168       for (i=0; i<nidx; i++)
1169 	for (j=0; j<bs; j++)
1170 	  newidx[i*bs+j] = indices[i]*bs + j;
1171       ierr = PetscFree(indices);CHKERRQ(ierr);
1172       nidx   *= bs;
1173       indices = newidx;
1174     }
1175     /* Shift to get global indices */
1176     for (i=0; i<nidx; i++) indices[i] += rstart;
1177 
1178     /* Build the index sets for each block */
1179     for (i=0; i<n; i++) {
1180       ierr   = ISCreateGeneral(PETSC_COMM_SELF,count[i],&indices[start],PETSC_COPY_VALUES,&is[i]);CHKERRQ(ierr);
1181       ierr   = ISSort(is[i]);CHKERRQ(ierr);
1182       start += count[i];
1183     }
1184 
1185     ierr = PetscFree(count);CHKERRQ(ierr);
1186     ierr = PetscFree(indices);CHKERRQ(ierr);
1187     ierr = ISDestroy(isnumb);CHKERRQ(ierr);
1188     ierr = ISDestroy(ispart);CHKERRQ(ierr);
1189 
1190   }
1191 
1192   PetscFunctionReturn(0);
1193 }
1194 
1195 #undef __FUNCT__
1196 #define __FUNCT__ "PCGASMDestroySubdomains"
1197 /*@C
1198    PCGASMDestroySubdomains - Destroys the index sets created with
1199    PCGASMCreateSubdomains(). Should be called after setting subdomains
1200    with PCGASMSetLocalSubdomains().
1201 
1202    Collective
1203 
1204    Input Parameters:
1205 +  n - the number of index sets
1206 .  is - the array of index sets
1207 -  is_local - the array of local index sets, can be PETSC_NULL
1208 
1209    Level: advanced
1210 
1211 .keywords: PC, GASM, additive Schwarz, create, subdomains, unstructured grid
1212 
1213 .seealso: PCGASMCreateSubdomains(), PCGASMSetLocalSubdomains()
1214 @*/
1215 PetscErrorCode PETSCKSP_DLLEXPORT PCGASMDestroySubdomains(PetscInt n, IS is[], IS is_local[])
1216 {
1217   PetscInt       i;
1218   PetscErrorCode ierr;
1219   PetscFunctionBegin;
1220   if (n <= 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"n must be > 0: n = %D",n);
1221   PetscValidPointer(is,2);
1222   for (i=0; i<n; i++) { ierr = ISDestroy(is[i]);CHKERRQ(ierr); }
1223   ierr = PetscFree(is);CHKERRQ(ierr);
1224   if (is_local) {
1225     PetscValidPointer(is_local,3);
1226     for (i=0; i<n; i++) { ierr = ISDestroy(is_local[i]);CHKERRQ(ierr); }
1227     ierr = PetscFree(is_local);CHKERRQ(ierr);
1228   }
1229   PetscFunctionReturn(0);
1230 }
1231 
1232 #undef __FUNCT__
1233 #define __FUNCT__ "PCGASMCreateSubdomains2D"
1234 /*@
1235    PCGASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
1236    preconditioner for a two-dimensional problem on a regular grid.
1237 
1238    Not Collective
1239 
1240    Input Parameters:
1241 +  m, n - the number of mesh points in the x and y directions
1242 .  M, N - the number of subdomains in the x and y directions
1243 .  dof - degrees of freedom per node
1244 -  overlap - overlap in mesh lines
1245 
1246    Output Parameters:
1247 +  Nsub - the number of subdomains created
1248 .  is - array of index sets defining overlapping (if overlap > 0) subdomains
1249 -  is_local - array of index sets defining non-overlapping subdomains
1250 
1251    Note:
1252    Presently PCGASMCreateSubdomains2D() is valid only for sequential
1253    preconditioners.  More general related routines are
1254    PCGASMSetTotalSubdomains() and PCGASMSetLocalSubdomains().
1255 
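   Example:
   A minimal usage sketch decomposing a 64 x 64 grid with one unknown per node
   into a 4 x 4 array of subdomains with one mesh line of overlap:
.vb
      PetscInt Nsub;
      IS       *is,*is_local;
      ierr = PCGASMCreateSubdomains2D(64,64,4,4,1,1,&Nsub,&is,&is_local);CHKERRQ(ierr);
      ierr = PCGASMSetLocalSubdomains(pc,Nsub,is,is_local);CHKERRQ(ierr);
.ve
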
1256    Level: advanced
1257 
1258 .keywords: PC, GASM, additive Schwarz, create, subdomains, 2D, regular grid
1259 
1260 .seealso: PCGASMSetTotalSubdomains(), PCGASMSetLocalSubdomains(), PCGASMGetSubKSP(),
1261           PCGASMSetOverlap()
1262 @*/
1263 PetscErrorCode PETSCKSP_DLLEXPORT PCGASMCreateSubdomains2D(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt dof,PetscInt overlap,PetscInt *Nsub,IS **is,IS **is_local)
1264 {
1265   PetscInt       i,j,height,width,ystart,xstart,yleft,yright,xleft,xright,loc_outer;
1266   PetscErrorCode ierr;
1267   PetscInt       nidx,*idx,loc,ii,jj,count;
1268 
1269   PetscFunctionBegin;
1270   if (dof != 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Support for dof > 1 not yet implemented");
1271 
1272   *Nsub = N*M;
1273   ierr = PetscMalloc((*Nsub)*sizeof(IS),is);CHKERRQ(ierr);
1274   ierr = PetscMalloc((*Nsub)*sizeof(IS),is_local);CHKERRQ(ierr);
1275   ystart = 0;
1276   loc_outer = 0;
1277   for (i=0; i<N; i++) {
1278     height = n/N + ((n % N) > i); /* height of subdomain */
1279     if (height < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many N subdomains for mesh dimension n");
1280     yleft  = ystart - overlap; if (yleft < 0) yleft = 0;
1281     yright = ystart + height + overlap; if (yright > n) yright = n;
1282     xstart = 0;
1283     for (j=0; j<M; j++) {
1284       width = m/M + ((m % M) > j); /* width of subdomain */
1285       if (width < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many M subdomains for mesh dimension m");
1286       xleft  = xstart - overlap; if (xleft < 0) xleft = 0;
1287       xright = xstart + width + overlap; if (xright > m) xright = m;
1288       nidx   = (xright - xleft)*(yright - yleft);
1289       ierr = PetscMalloc(nidx*sizeof(PetscInt),&idx);CHKERRQ(ierr);
1290       loc    = 0;
1291       for (ii=yleft; ii<yright; ii++) {
1292         count = m*ii + xleft;
1293         for (jj=xleft; jj<xright; jj++) {
1294           idx[loc++] = count++;
1295         }
1296       }
1297       ierr = ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,PETSC_COPY_VALUES,(*is)+loc_outer);CHKERRQ(ierr);
1298       if (overlap == 0) {
1299         ierr = PetscObjectReference((PetscObject)(*is)[loc_outer]);CHKERRQ(ierr);
1300         (*is_local)[loc_outer] = (*is)[loc_outer];
1301       } else {
1302         for (loc=0,ii=ystart; ii<ystart+height; ii++) {
1303           for (jj=xstart; jj<xstart+width; jj++) {
1304             idx[loc++] = m*ii + jj;
1305           }
1306         }
1307         ierr = ISCreateGeneral(PETSC_COMM_SELF,loc,idx,PETSC_COPY_VALUES,*is_local+loc_outer);CHKERRQ(ierr);
1308       }
1309       ierr = PetscFree(idx);CHKERRQ(ierr);
1310       xstart += width;
1311       loc_outer++;
1312     }
1313     ystart += height;
1314   }
1315   for (i=0; i<*Nsub; i++) { ierr = ISSort((*is)[i]);CHKERRQ(ierr); }
1316   PetscFunctionReturn(0);
1317 }
1318 
1319 #undef __FUNCT__
1320 #define __FUNCT__ "PCGASMGetLocalSubdomains"
1321 /*@C
1322     PCGASMGetLocalSubdomains - Gets the local subdomains (for this processor
1323     only) for the additive Schwarz preconditioner.
1324 
1325     Not Collective
1326 
1327     Input Parameter:
1328 .   pc - the preconditioner context
1329 
1330     Output Parameters:
1331 +   n - the number of subdomains for this processor (default value = 1)
1332 .   is - the index sets that define the subdomains for this processor
1333 -   is_local - the index sets that define the local part of the subdomains for this processor (can be PETSC_NULL)
1334 
1335 
1336     Notes:
1337     The IS numbering is in the parallel, global numbering of the vector.
1338 
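    Example:
    A minimal usage sketch (valid once the subdomains have been set or PCSetUp()
    has been called):
.vb
      PetscInt n;
      IS       *is,*is_local;
      ierr = PCGASMGetLocalSubdomains(pc,&n,&is,&is_local);CHKERRQ(ierr);
.ve
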
1339     Level: advanced
1340 
1341 .keywords: PC, GASM, set, local, subdomains, additive Schwarz
1342 
1343 .seealso: PCGASMSetTotalSubdomains(), PCGASMSetOverlap(), PCGASMGetSubKSP(),
1344           PCGASMCreateSubdomains2D(), PCGASMSetLocalSubdomains(), PCGASMGetLocalSubmatrices()
1345 @*/
1346 PetscErrorCode PETSCKSP_DLLEXPORT PCGASMGetLocalSubdomains(PC pc,PetscInt *n,IS *is[],IS *is_local[])
1347 {
1348   PC_GASM         *osm;
1349   PetscErrorCode ierr;
1350   PetscBool      match;
1351 
1352   PetscFunctionBegin;
1353   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1354   PetscValidIntPointer(n,2);
1355   if (is) PetscValidPointer(is,3);
1356   ierr = PetscTypeCompare((PetscObject)pc,PCGASM,&match);CHKERRQ(ierr);
1357   if (!match) {
1358     if (n)  *n  = 0;
1359     if (is) *is = PETSC_NULL;
1360   } else {
1361     osm = (PC_GASM*)pc->data;
1362     if (n)  *n  = osm->n;
1363     if (is) *is = osm->is;
1364     if (is_local) *is_local = osm->is_local;
1365   }
1366   PetscFunctionReturn(0);
1367 }
1368 
1369 #undef __FUNCT__
1370 #define __FUNCT__ "PCGASMGetLocalSubmatrices"
1371 /*@C
1372     PCGASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1373     only) for the additive Schwarz preconditioner.
1374 
1375     Not Collective
1376 
1377     Input Parameter:
1378 .   pc - the preconditioner context
1379 
1380     Output Parameters:
1381 +   n - the number of matrices for this processor (default value = 1)
1382 -   mat - the matrices
1383 
1384 
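    Example:
    A minimal usage sketch that views each local submatrix (valid only after
    PCSetUp() or KSPSetUp() has been called):
.vb
      PetscInt i,n;
      Mat      *mat;
      ierr = PCGASMGetLocalSubmatrices(pc,&n,&mat);CHKERRQ(ierr);
      for (i=0; i<n; i++) {ierr = MatView(mat[i],PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr);}
.ve
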
1385     Level: advanced
1386 
1387 .keywords: PC, GASM, set, local, subdomains, additive Schwarz, block Jacobi
1388 
1389 .seealso: PCGASMSetTotalSubdomains(), PCGASMSetOverlap(), PCGASMGetSubKSP(),
1390           PCGASMCreateSubdomains2D(), PCGASMSetLocalSubdomains(), PCGASMGetLocalSubdomains()
1391 @*/
1392 PetscErrorCode PETSCKSP_DLLEXPORT PCGASMGetLocalSubmatrices(PC pc,PetscInt *n,Mat *mat[])
1393 {
1394   PC_GASM         *osm;
1395   PetscErrorCode ierr;
1396   PetscBool      match;
1397 
1398   PetscFunctionBegin;
1399   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1400   PetscValidIntPointer(n,2);
1401   if (mat) PetscValidPointer(mat,3);
1402   if (!pc->setupcalled) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUp() or PCSetUp().");
1403   ierr = PetscTypeCompare((PetscObject)pc,PCGASM,&match);CHKERRQ(ierr);
1404   if (!match) {
1405     if (n)   *n   = 0;
1406     if (mat) *mat = PETSC_NULL;
1407   } else {
1408     osm = (PC_GASM*)pc->data;
1409     if (n)   *n   = osm->n;
1410     if (mat) *mat = osm->pmat;
1411   }
1412   PetscFunctionReturn(0);
1413 }
1414