#define PETSCKSP_DLL

/*
  This file defines a "generalized" additive Schwarz preconditioner for any Mat implementation.
  In this version each processor may have any number of subdomains and a subdomain may live on multiple
  processors.

       N    - total number of true subdomains on all processors
       n    - actual number of subdomains on this processor
       nmax - maximum number of subdomains per processor
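
       For example (illustrative): with 3 processes owning 2, 1, and 3 subdomains,
       respectively, N = 6, n is 2, 1, or 3 depending on the process, and nmax = 3.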
*/
#include "private/pcimpl.h"     /*I "petscpc.h" I*/

typedef struct {
  PetscInt   N,n,nmax;
  PetscInt   overlap;             /* overlap requested by user */
  KSP        *ksp;                /* linear solvers for each block */
  Vec        gx,gy;               /* merged work vectors */
  Vec        *x,*y;               /* split work vectors; storage aliases pieces of storage of the merged vectors above */
  IS         gis, gis_local;      /* merged ISs */
  VecScatter grestriction;        /* merged restriction */
  VecScatter gprolongation;       /* merged prolongation */
  IS         *is;                 /* index sets that define each overlapping subdomain */
  IS         *is_local;           /* index sets that define each local subdomain (the subdomain with the overlap removed); may be NULL */
  Mat        *pmat;               /* subdomain block matrices */
  PCGASMType type;                /* use reduced interpolation, restriction or both */
  PetscBool  type_set;            /* if the user set this value (so it won't be changed for symmetric problems) */
  PetscBool  same_local_solves;   /* flag indicating whether all local solvers are the same */
  PetscBool  sort_indices;        /* flag to sort subdomain indices */
} PC_GASM;

#undef __FUNCT__
#define __FUNCT__ "PCView_GASM"
static PetscErrorCode PCView_GASM(PC pc,PetscViewer viewer)
{
  PC_GASM        *osm = (PC_GASM*)pc->data;
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  PetscInt       i,bsz;
  PetscBool      iascii,isstring;
  PetscViewer    sviewer;

  PetscFunctionBegin;
  ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    char overlaps[256] = "user-defined overlap",blocks[256] = "total subdomain blocks not yet set";
    if (osm->overlap >= 0) {ierr = PetscSNPrintf(overlaps,sizeof overlaps,"amount of overlap = %D",osm->overlap);CHKERRQ(ierr);}
    if (osm->nmax > 0)     {ierr = PetscSNPrintf(blocks,sizeof blocks,"max number of subdomain blocks = %D",osm->nmax);CHKERRQ(ierr);}
    /* Determine the rank before it is used in the synchronized print below. */
    ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
    ierr = PetscViewerASCIISynchronizedPrintf(viewer,"  [%d] number of local blocks = %D\n",(int)rank,osm->n);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer,"  Generalized additive Schwarz: %s, %s\n",blocks,overlaps);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer,"  Generalized additive Schwarz: restriction/interpolation type - %s\n",PCGASMTypes[osm->type]);CHKERRQ(ierr);
    if (osm->same_local_solves) {
      if (osm->ksp) {
        ierr = PetscViewerASCIIPrintf(viewer,"  Local solve is same for all blocks, in the following KSP and PC objects:\n");CHKERRQ(ierr);
        ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
        if (!rank) {
          ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
          ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);
          ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
        }
        ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
      }
    } else {
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"  Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
      for (i=0; i<osm->nmax; i++) {
        ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
        if (i < osm->n) {
          ierr = ISGetLocalSize(osm->is[i],&bsz);CHKERRQ(ierr);
          ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"[%d] local block number %D, size = %D\n",(int)rank,i,bsz);CHKERRQ(ierr);
          ierr = KSPView(osm->ksp[i],sviewer);CHKERRQ(ierr);
          ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
        }
        ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
      }
      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    ierr = PetscViewerStringSPrintf(viewer," blocks=%D, overlap=%D, type=%s",osm->n,osm->overlap,PCGASMTypes[osm->type]);CHKERRQ(ierr);
    ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
    if (osm->ksp) {ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);}
    ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
  } else {
    SETERRQ1(((PetscObject)pc)->comm,PETSC_ERR_SUP,"Viewer type %s not supported for PCGASM",((PetscObject)viewer)->type_name);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGASMPrintSubdomains"
static PetscErrorCode PCGASMPrintSubdomains(PC pc)
{
  PC_GASM        *osm = (PC_GASM*)pc->data;
  const char     *prefix;
  char           fname[PETSC_MAX_PATH_LEN+1] = "";  /* initialized, since the option may be absent or given without a value */
  PetscViewer    viewer;
  PetscInt       i,j,nidx;
  const PetscInt *idx;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
  ierr = PetscOptionsGetString(prefix,"-pc_gasm_print_subdomains",fname,PETSC_MAX_PATH_LEN,PETSC_NULL);CHKERRQ(ierr);
  if (fname[0] == 0) { ierr = PetscStrcpy(fname,"stdout");CHKERRQ(ierr); }
  ierr = PetscViewerASCIIOpen(((PetscObject)pc)->comm,fname,&viewer);CHKERRQ(ierr);
  for (i=0; i<osm->n; ++i) {
    ierr = ISGetLocalSize(osm->is[i],&nidx);CHKERRQ(ierr);
    ierr = ISGetIndices(osm->is[i],&idx);CHKERRQ(ierr);
    for (j=0; j<nidx; j++) {
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"%D ",idx[j]);CHKERRQ(ierr);
    }
    ierr = ISRestoreIndices(osm->is[i],&idx);CHKERRQ(ierr);
    ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
    if (osm->is_local) {
      ierr = ISGetLocalSize(osm->is_local[i],&nidx);CHKERRQ(ierr);
      ierr = ISGetIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
      for (j=0; j<nidx; j++) {
        ierr = PetscViewerASCIISynchronizedPrintf(viewer,"%D ",idx[j]);CHKERRQ(ierr);
      }
      ierr = ISRestoreIndices(osm->is_local[i],&idx);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
    }
  }
  ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
  ierr = PetscViewerDestroy(viewer);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetUp_GASM"
static PetscErrorCode PCSetUp_GASM(PC pc)
{
  PC_GASM        *osm = (PC_GASM*)pc->data;
  PetscErrorCode ierr;
  PetscBool      symset,flg;
  PetscInt       i,firstRow,lastRow;
  PetscMPIInt    size;
  MatReuse       scall = MAT_REUSE_MATRIX;
  KSP            ksp;
  PC             subpc;
  const char     *prefix,*pprefix;
  PetscInt       dn;       /* Number of indices in a single subdomain assigned to this processor. */
  const PetscInt *didx;    /* Indices from a single subdomain assigned to this processor. */
  PetscInt       ddn;      /* Number of indices in all subdomains assigned to this processor. */
  PetscInt       *ddidx;   /* Indices of all subdomains assigned to this processor. */
  IS             gid;      /* Identity IS of the size of all subdomains assigned to this processor. */
  Vec            x,y;
  PetscScalar    *gxarray, *gyarray;
  IS             *is;

  PetscFunctionBegin;
  if (!pc->setupcalled) {

    if (!osm->type_set) {
      ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
      if (symset && flg) { osm->type = PC_GASM_BASIC; }
    }

    if (osm->N == PETSC_DECIDE && osm->n < 1) {
      /* no subdomains given, use one per processor */
      osm->nmax = osm->n = 1;
      ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
      osm->N = size;
    } else if (osm->N == PETSC_DECIDE) {
      PetscInt inwork[2], outwork[2];
      /* determine global number of subdomains and the max number of local subdomains */
      inwork[0] = inwork[1] = osm->n;
      ierr = MPI_Allreduce(inwork,outwork,1,MPIU_2INT,PetscMaxSum_Op,((PetscObject)pc)->comm);CHKERRQ(ierr);
      osm->nmax = outwork[0];
      osm->N    = outwork[1];
    }
    if (!osm->is) { /* create the index sets */
      ierr = PCGASMCreateSubdomains(pc->pmat,osm->n,&osm->is);CHKERRQ(ierr);
    }
    if (osm->n > 1 && !osm->is_local) {
      ierr = PetscMalloc(osm->n*sizeof(IS),&osm->is_local);CHKERRQ(ierr);
      for (i=0; i<osm->n; i++) {
        if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
          ierr = ISDuplicate(osm->is[i],&osm->is_local[i]);CHKERRQ(ierr);
          ierr = ISCopy(osm->is[i],osm->is_local[i]);CHKERRQ(ierr);
        } else {
          ierr = PetscObjectReference((PetscObject)osm->is[i]);CHKERRQ(ierr);
          osm->is_local[i] = osm->is[i];
        }
      }
    }
    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    flg  = PETSC_FALSE;
    ierr = PetscOptionsGetBool(prefix,"-pc_gasm_print_subdomains",&flg,PETSC_NULL);CHKERRQ(ierr);
    if (flg) { ierr = PCGASMPrintSubdomains(pc);CHKERRQ(ierr); }

    if (osm->overlap > 0) {
      /* Extend the "overlapping" regions by a number of steps */
      ierr = MatIncreaseOverlap(pc->pmat,osm->n,osm->is,osm->overlap);CHKERRQ(ierr);
    }
    if (osm->sort_indices) {
      for (i=0; i<osm->n; i++) {
        ierr = ISSort(osm->is[i]);CHKERRQ(ierr);
        if (osm->is_local) {
          ierr = ISSort(osm->is_local[i]);CHKERRQ(ierr);
        }
      }
    }
    /* Merge the ISs, create merged vectors and scatter contexts. */
    /* Domain IS. */
    ddn = 0;
    for (i=0; i<osm->n; i++) {
      ierr = ISGetLocalSize(osm->is[i],&dn);CHKERRQ(ierr);
      ddn += dn;
    }
    ierr = PetscMalloc(ddn*sizeof(PetscInt), &ddidx);CHKERRQ(ierr);
    ddn = 0;
    for (i=0; i<osm->n; i++) {
      ierr = ISGetLocalSize(osm->is[i],&dn);CHKERRQ(ierr);
      ierr = ISGetIndices(osm->is[i],&didx);CHKERRQ(ierr);
      ierr = PetscMemcpy(ddidx+ddn, didx, sizeof(PetscInt)*dn);CHKERRQ(ierr);
      ierr = ISRestoreIndices(osm->is[i], &didx);CHKERRQ(ierr);
      ddn += dn; /* advance the offset; otherwise each subdomain's indices would overwrite the previous one's */
    }
    ierr = ISCreateGeneral(((PetscObject)(pc))->comm, ddn, ddidx, PETSC_OWN_POINTER, &osm->gis);CHKERRQ(ierr);
    ierr = MatGetVecs(pc->pmat,&x,&y);CHKERRQ(ierr);
    ierr = VecCreateMPI(((PetscObject)pc)->comm, ddn, PETSC_DECIDE, &osm->gx);CHKERRQ(ierr);
    ierr = VecDuplicate(osm->gx,&osm->gy);CHKERRQ(ierr);
    ierr = ISCreateStride(((PetscObject)pc)->comm,ddn,0,1, &gid);CHKERRQ(ierr);
    ierr = VecScatterCreate(x,osm->gis,osm->gx,gid, &(osm->grestriction));CHKERRQ(ierr);
    ierr = ISDestroy(gid);CHKERRQ(ierr);
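    /*
      At this point osm->gx and osm->gy are parallel work vectors whose local pieces hold the
      concatenation of this processor's (overlapping) subdomains, and osm->grestriction
      scatters from the global vector layout into that concatenated layout.
    */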
    /* Local domain IS */
    if (osm->is_local) { /* All ranks either have this or not, so collective calls within this branch are okay. */
      PetscInt       dn_local;       /* Number of indices in the local part of a single domain assigned to this processor. */
      const PetscInt *didx_local;    /* Global indices from the local part of a single domain assigned to this processor. */
      PetscInt       ddn_local;      /* Number of indices in the local part of the disjoint union of all domains assigned to this processor. */
      PetscInt       *ddidx_local;   /* Global indices of the local part of the disjoint union of all domains assigned to this processor. */
      /**/
      ISLocalToGlobalMapping ltog;          /* Mapping from global to local indices on the disjoint union of subdomains: local indices run from 0 to ddn-1. */
      PetscInt              *ddidx_llocal;  /* Local (within the disjoint union of subdomains) indices of the disjoint union of local parts of subdomains. */
      PetscInt               ddn_llocal;    /* Number of indices in ddidx_llocal; must equal ddn_local, or else gis_local is not a sub-IS of gis. */
      IS                     gis_llocal;    /* IS with ddidx_llocal indices. */
      PetscInt               j;
      ddn_local = 0;
      for (i=0; i<osm->n; i++) {
        ierr = ISGetLocalSize(osm->is_local[i],&dn_local);CHKERRQ(ierr);
        ddn_local += dn_local;
      }
      ierr = PetscMalloc(ddn_local*sizeof(PetscInt), &ddidx_local);CHKERRQ(ierr);
      ddn_local = 0;
      for (i=0; i<osm->n; i++) {
        ierr = ISGetLocalSize(osm->is_local[i],&dn_local);CHKERRQ(ierr);
        ierr = ISGetIndices(osm->is_local[i],&didx_local);CHKERRQ(ierr);
        ierr = PetscMemcpy(ddidx_local+ddn_local, didx_local, sizeof(PetscInt)*dn_local);CHKERRQ(ierr);
        ierr = ISRestoreIndices(osm->is_local[i], &didx_local);CHKERRQ(ierr);
        ddn_local += dn_local;
      }
      ierr = VecGetOwnershipRange(y, &firstRow, &lastRow);CHKERRQ(ierr);
      if (ddn_local != lastRow-firstRow) {
        SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Specified local domains of total size %D do not cover the local vector range [%D,%D)",
                 ddn_local, firstRow, lastRow);
      }
      ierr = PetscMalloc(sizeof(PetscInt)*ddn_local, &ddidx_llocal);CHKERRQ(ierr);
      ierr = ISCreateGeneral(((PetscObject)pc)->comm, ddn_local, ddidx_local, PETSC_OWN_POINTER, &(osm->gis_local));CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingCreateIS(osm->gis,&ltog);CHKERRQ(ierr);
      ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,ddn_local,ddidx_local,&ddn_llocal,ddidx_llocal);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingDestroy(ltog);CHKERRQ(ierr);
      if (ddn_llocal != ddn_local) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"gis_local contains %D nonlocal indices", ddn_local - ddn_llocal);
      /* Now convert these localized indices into the global indices into the merged output vector. */
      ierr = VecGetOwnershipRange(osm->gy, &firstRow, &lastRow);CHKERRQ(ierr);
      for (j=0; j < ddn_llocal; ++j) {
        ddidx_llocal[j] += firstRow;
      }
      ierr = ISCreateGeneral(PETSC_COMM_SELF,ddn_llocal,ddidx_llocal,PETSC_OWN_POINTER,&gis_llocal);CHKERRQ(ierr);
      ierr = VecScatterCreate(y,osm->gis_local,osm->gy,gis_llocal,&osm->gprolongation);CHKERRQ(ierr);
      ierr = ISDestroy(gis_llocal);CHKERRQ(ierr);

    }
    /* Create the subdomain work vectors. */
    ierr = PetscMalloc(osm->n*sizeof(Vec),&osm->x);CHKERRQ(ierr);
    ierr = PetscMalloc(osm->n*sizeof(Vec),&osm->y);CHKERRQ(ierr);
    ierr = VecGetOwnershipRange(osm->gx, &firstRow, &lastRow);CHKERRQ(ierr);
    ierr = VecGetArray(osm->gx, &gxarray);CHKERRQ(ierr);
    ierr = VecGetArray(osm->gy, &gyarray);CHKERRQ(ierr);
    for (i=0, ddn=0; i<osm->n; ++i, ddn += dn) {
      ierr = ISGetLocalSize(osm->is[i],&dn);CHKERRQ(ierr);
      ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,dn,gxarray+ddn,&osm->x[i]);CHKERRQ(ierr);
      ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,dn,gyarray+ddn,&osm->y[i]);CHKERRQ(ierr);
    }
    ierr = VecRestoreArray(osm->gx, &gxarray);CHKERRQ(ierr);
    ierr = VecRestoreArray(osm->gy, &gyarray);CHKERRQ(ierr);
    ierr = VecDestroy(x);CHKERRQ(ierr);
    ierr = VecDestroy(y);CHKERRQ(ierr);
    /* Create the local solvers */
    ierr = PetscMalloc(osm->n*sizeof(KSP),&osm->ksp);CHKERRQ(ierr);
    for (i=0; i<osm->n; i++) { /* KSPs are local */
      ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
      ierr = PetscLogObjectParent(pc,ksp);CHKERRQ(ierr);
      ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
      ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
      ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
      ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
      ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
      ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
      osm->ksp[i] = ksp;
    }
    scall = MAT_INITIAL_MATRIX;

  } else {
    /*
       Destroy the blocks from the previous iteration
    */
    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      ierr = MatDestroyMatrices(osm->n,&osm->pmat);CHKERRQ(ierr);
      scall = MAT_INITIAL_MATRIX;
    }
  }

  /*
     Extract out the submatrices.
  */
  ierr = PetscMalloc(sizeof(IS)*osm->nmax, &is);CHKERRQ(ierr);
  for (i=0; i<osm->n; ++i) {
    is[i] = osm->is[i];
  }
  for (i=osm->n; i<osm->nmax; i++) {
    ierr = ISCreateStride(((PetscObject)pc)->comm,0,0,1,&is[i]);CHKERRQ(ierr);
  }
  ierr = MatGetSubMatrices(pc->pmat,osm->n,osm->is,is,scall,&osm->pmat);CHKERRQ(ierr);
  if (scall == MAT_INITIAL_MATRIX) {
    ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
    for (i=0; i<osm->n; i++) {
      ierr = PetscLogObjectParent(pc,osm->pmat[i]);CHKERRQ(ierr);
      ierr = PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);CHKERRQ(ierr);
    }
  }

  /* Return control to the user so that the submatrices can be modified (e.g., to apply
     different boundary conditions for the submatrices than for the global problem) */
  ierr = PCModifySubMatrices(pc,osm->n,is,is,osm->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);
  for (i=osm->n; i<osm->nmax; i++) {
    ierr = ISDestroy(is[i]);CHKERRQ(ierr);
  }
  ierr = PetscFree(is);CHKERRQ(ierr);

  /*
     Loop over the submatrices, putting each into the KSP of its local solver
  */
  for (i=0; i<osm->n; i++) {
    ierr = KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i],pc->flag);CHKERRQ(ierr);
    if (!pc->setupcalled) {
      ierr = KSPSetFromOptions(osm->ksp[i]);CHKERRQ(ierr);
    }
  }

  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetUpOnBlocks_GASM"
static PetscErrorCode PCSetUpOnBlocks_GASM(PC pc)
{
  PC_GASM        *osm = (PC_GASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  for (i=0; i<osm->n; i++) {
    ierr = KSPSetUp(osm->ksp[i]);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApply_GASM"
static PetscErrorCode PCApply_GASM(PC pc,Vec x,Vec y)
{
  PC_GASM        *osm = (PC_GASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).
  */
  if (!(osm->type & PC_GASM_RESTRICT)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    ierr = VecZeroEntries(osm->gx);CHKERRQ(ierr);
  }
  if (!(osm->type & PC_GASM_INTERPOLATE)) {
    reverse = SCATTER_REVERSE_LOCAL;
  }

  ierr = VecScatterBegin(osm->grestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
  ierr = VecZeroEntries(y);CHKERRQ(ierr);
  ierr = VecScatterEnd(osm->grestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
  /* do the local solves */
  for (i=0; i<osm->n; ++i) { /* Note that the solves are local, so we can go to osm->n, rather than osm->nmax. */
    ierr = KSPSolve(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
  }
  ierr = VecScatterBegin(osm->gprolongation,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
  ierr = VecScatterEnd(osm->gprolongation,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApplyTranspose_GASM"
static PetscErrorCode PCApplyTranspose_GASM(PC pc,Vec x,Vec y)
{
  PC_GASM        *osm = (PC_GASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;
  ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

  PetscFunctionBegin;
  /*
     Support for limiting the restriction or interpolation to only local
     subdomain values (leaving the other values 0).

     Note: these are reversed from the PCApply_GASM() because we are applying the
     transpose of the three terms
  */
  if (!(osm->type & PC_GASM_INTERPOLATE)) {
    forward = SCATTER_FORWARD_LOCAL;
    /* have to zero the work RHS since scatter may leave some slots empty */
    ierr = VecZeroEntries(osm->gx);CHKERRQ(ierr);
  }
  if (!(osm->type & PC_GASM_RESTRICT)) {
    reverse = SCATTER_REVERSE_LOCAL;
  }

  ierr = VecScatterBegin(osm->grestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
  ierr = VecZeroEntries(y);CHKERRQ(ierr);
  ierr = VecScatterEnd(osm->grestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr);
  /* do the local solves */
  for (i=0; i<osm->n; ++i) { /* Note that the solves are local, so we can go to osm->n, rather than osm->nmax. */
    ierr = KSPSolveTranspose(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr);
  }
  ierr = VecScatterBegin(osm->gprolongation,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
  ierr = VecScatterEnd(osm->gprolongation,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCDestroy_GASM"
static PetscErrorCode PCDestroy_GASM(PC pc)
{
  PC_GASM        *osm = (PC_GASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  if (osm->ksp) {
    for (i=0; i<osm->n; i++) {
      ierr = KSPDestroy(osm->ksp[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree(osm->ksp);CHKERRQ(ierr);
  }
  if (osm->pmat) {
    if (osm->n > 0) {
      ierr = MatDestroyMatrices(osm->n,&osm->pmat);CHKERRQ(ierr);
    }
  }
  if (osm->x) {
    for (i=0; i<osm->n; i++) {
      ierr = VecDestroy(osm->x[i]);CHKERRQ(ierr);
      ierr = VecDestroy(osm->y[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree(osm->x);CHKERRQ(ierr);
    ierr = PetscFree(osm->y);CHKERRQ(ierr);
  }
  /* Guard the destructions below: none of these objects exist if PCSetUp() was never called. */
  if (osm->gx) {ierr = VecDestroy(osm->gx);CHKERRQ(ierr);}
  if (osm->gy) {ierr = VecDestroy(osm->gy);CHKERRQ(ierr);}

  if (osm->grestriction)  {ierr = VecScatterDestroy(osm->grestriction);CHKERRQ(ierr);}
  if (osm->gprolongation) {ierr = VecScatterDestroy(osm->gprolongation);CHKERRQ(ierr);}

  if (osm->is) {
    ierr = PCGASMDestroySubdomains(osm->n,osm->is,osm->is_local);CHKERRQ(ierr);
  }
  ierr = PetscFree(osm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetFromOptions_GASM"
static PetscErrorCode PCSetFromOptions_GASM(PC pc)
{
  PC_GASM        *osm = (PC_GASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       blocks,ovl;
  PetscBool      symset,flg;
  PCGASMType     gasmtype;

  PetscFunctionBegin;
  /* set the type to symmetric if matrix is symmetric */
  if (!osm->type_set && pc->pmat) {
    ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
    if (symset && flg) { osm->type = PC_GASM_BASIC; }
  }
  ierr = PetscOptionsHead("Generalized additive Schwarz options");CHKERRQ(ierr);
    ierr = PetscOptionsInt("-pc_gasm_blocks","Number of subdomains","PCGASMSetTotalSubdomains",osm->n,&blocks,&flg);CHKERRQ(ierr);
    if (flg) {ierr = PCGASMSetTotalSubdomains(pc,blocks);CHKERRQ(ierr); }
    ierr = PetscOptionsInt("-pc_gasm_overlap","Number of grid points overlap","PCGASMSetOverlap",osm->overlap,&ovl,&flg);CHKERRQ(ierr);
    if (flg) {ierr = PCGASMSetOverlap(pc,ovl);CHKERRQ(ierr); }
    flg  = PETSC_FALSE;
    ierr = PetscOptionsEnum("-pc_gasm_type","Type of restriction/extension","PCGASMSetType",PCGASMTypes,(PetscEnum)osm->type,(PetscEnum*)&gasmtype,&flg);CHKERRQ(ierr);
    if (flg) {ierr = PCGASMSetType(pc,gasmtype);CHKERRQ(ierr); }
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*------------------------------------------------------------------------------------*/

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCGASMSetLocalSubdomains_GASM"
PetscErrorCode PETSCKSP_DLLEXPORT PCGASMSetLocalSubdomains_GASM(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PC_GASM        *osm = (PC_GASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 1 or more blocks, n = %D",n);
  if (pc->setupcalled && (n != osm->n || is)) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"PCGASMSetLocalSubdomains() should be called before calling PCSetUp().");

  if (!pc->setupcalled) {
    if (is) {
      for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is[i]);CHKERRQ(ierr);}
    }
    if (is_local) {
      for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is_local[i]);CHKERRQ(ierr);}
    }
    if (osm->is) {
      ierr = PCGASMDestroySubdomains(osm->n,osm->is,osm->is_local);CHKERRQ(ierr);
    }
    osm->n        = n;
    osm->is       = 0;
    osm->is_local = 0;
    if (is) {
      ierr = PetscMalloc(n*sizeof(IS),&osm->is);CHKERRQ(ierr);
      for (i=0; i<n; i++) { osm->is[i] = is[i]; }
      /* Flag indicating that the user has set overlapping subdomains, so PCGASM should not increase their size. */
      osm->overlap = -1;
    }
    if (is_local) {
      ierr = PetscMalloc(n*sizeof(IS),&osm->is_local);CHKERRQ(ierr);
      for (i=0; i<n; i++) { osm->is_local[i] = is_local[i]; }
    }
  }
  PetscFunctionReturn(0);
}
EXTERN_C_END

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCGASMSetTotalSubdomains_GASM"
PetscErrorCode PETSCKSP_DLLEXPORT PCGASMSetTotalSubdomains_GASM(PC pc,PetscInt N)
{
  PC_GASM        *osm = (PC_GASM*)pc->data;
  PetscErrorCode ierr;
  PetscMPIInt    rank,size;
  PetscInt       n;

  PetscFunctionBegin;
  if (N < 1) SETERRQ1(((PetscObject)pc)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Number of total blocks must be > 0, N = %D",N);

  /*
     Split the subdomains equally among all processors
  */
  ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
  n = N/size + ((N % size) > rank);
  if (!n) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Process %d must have at least one block: total processors %d, total blocks %D",(int)rank,(int)size,N);
  if (pc->setupcalled && n != osm->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"PCGASMSetTotalSubdomains() should be called before PCSetUp().");
  if (!pc->setupcalled) {
    if (osm->is) {
      ierr = PCGASMDestroySubdomains(osm->n,osm->is,osm->is_local);CHKERRQ(ierr);
    }
    osm->n        = n;
    osm->is       = 0;
    osm->is_local = 0;
  }
  PetscFunctionReturn(0);
}/* PCGASMSetTotalSubdomains_GASM() */
EXTERN_C_END

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCGASMSetOverlap_GASM"
PetscErrorCode PETSCKSP_DLLEXPORT PCGASMSetOverlap_GASM(PC pc,PetscInt ovl)
{
  PC_GASM *osm = (PC_GASM*)pc->data;

  PetscFunctionBegin;
  if (ovl < 0) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
  if (pc->setupcalled && ovl != osm->overlap) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"PCGASMSetOverlap() should be called before PCSetUp().");
  if (!pc->setupcalled) {
    osm->overlap = ovl;
  }
  PetscFunctionReturn(0);
}
EXTERN_C_END

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCGASMSetType_GASM"
PetscErrorCode PETSCKSP_DLLEXPORT PCGASMSetType_GASM(PC pc,PCGASMType type)
{
  PC_GASM *osm = (PC_GASM*)pc->data;

  PetscFunctionBegin;
  osm->type     = type;
  osm->type_set = PETSC_TRUE;
  PetscFunctionReturn(0);
}
EXTERN_C_END

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCGASMSetSortIndices_GASM"
PetscErrorCode PETSCKSP_DLLEXPORT PCGASMSetSortIndices_GASM(PC pc,PetscBool doSort)
{
  PC_GASM *osm = (PC_GASM*)pc->data;

  PetscFunctionBegin;
  osm->sort_indices = doSort;
  PetscFunctionReturn(0);
}
EXTERN_C_END

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCGASMGetSubKSP_GASM"
/*
   FIX: This routine might need to be modified once multiple ranks per subdomain are allowed.
        In particular, it would upset the global subdomain number calculation.
*/
PetscErrorCode PETSCKSP_DLLEXPORT PCGASMGetSubKSP_GASM(PC pc,PetscInt *n,PetscInt *first,KSP **ksp)
{
  PC_GASM        *osm = (PC_GASM*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (osm->n < 1) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ORDER,"Need to call PCSetUp() on PC (or KSPSetUp() on the outer KSP object) before calling here");

  if (n) {
    *n = osm->n;
  }
  if (first) {
    ierr = MPI_Scan(&osm->n,first,1,MPIU_INT,MPI_SUM,((PetscObject)pc)->comm);CHKERRQ(ierr);
    *first -= osm->n;
  }
  if (ksp) {
    /* Assume that the local solves are now different; not necessarily
       true, though!  This flag is used only for PCView_GASM(). */
    *ksp                   = osm->ksp;
    osm->same_local_solves = PETSC_FALSE;
  }
  PetscFunctionReturn(0);
}/* PCGASMGetSubKSP_GASM() */
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCGASMSetLocalSubdomains"
/*@C
    PCGASMSetLocalSubdomains - Sets the local subdomains (for this processor
    only) for the additive Schwarz preconditioner.

    Collective on PC

    Input Parameters:
+   pc - the preconditioner context
.   n - the number of subdomains for this processor (default value = 1)
.   is - the index sets that define the subdomains for this processor
         (or PETSC_NULL for PETSc to determine the subdomains)
-   is_local - the index sets that define the local (nonoverlapping) part of each
         subdomain for this processor (or PETSC_NULL)

    Notes:
    The IS numbering is in the parallel, global numbering of the vector.

    By default the GASM preconditioner uses 1 block per processor.

    Use PCGASMSetTotalSubdomains() to set the subdomains for all processors.

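    Example:
    A sketch of providing two user-assembled subdomains per process; the index sets
    subis[] (overlapping subdomains) and subis_local[] (their nonoverlapping parts)
    are hypothetical and assumed to have been created by the caller, e.g. with
    ISCreateGeneral():
.vb
    IS subis[2],subis_local[2];
    /* ... fill subis[] and subis_local[] with global indices ... */
    ierr = PCSetType(pc,PCGASM);CHKERRQ(ierr);
    ierr = PCGASMSetLocalSubdomains(pc,2,subis,subis_local);CHKERRQ(ierr);
.ve
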
    Level: advanced

.keywords: PC, GASM, set, local, subdomains, additive Schwarz

.seealso: PCGASMSetTotalSubdomains(), PCGASMSetOverlap(), PCGASMGetSubKSP(),
          PCGASMCreateSubdomains2D(), PCGASMGetLocalSubdomains()
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCGASMSetLocalSubdomains(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGASMSetLocalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,n,is,is_local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGASMSetTotalSubdomains"
/*@C
    PCGASMSetTotalSubdomains - Sets the total number of subdomains, over all
    processors, for the additive Schwarz preconditioner.  Either all or no
    processors in the PC communicator must call this routine, with the same N.

    Collective on PC

    Input Parameters:
+   pc - the preconditioner context
-   N  - the total number of subdomains for all processors

    Options Database Key:
    To set the total number of subdomain blocks rather than specify the
    index sets, use the option
.    -pc_gasm_blocks <blks> - Sets total blocks

    Notes:
    Currently this routine sets only the number of subdomains; the subdomains
    themselves are determined automatically and split equally among the processors.

    By default the GASM preconditioner uses 1 block per processor.

    Use PCGASMSetLocalSubdomains() to set the subdomains explicitly.

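    Example:
    A sketch of requesting 16 subdomains in total, split across the communicator;
    the subdomains themselves are then generated during PCSetUp():
.vb
    ierr = PCSetType(pc,PCGASM);CHKERRQ(ierr);
    ierr = PCGASMSetTotalSubdomains(pc,16);CHKERRQ(ierr);
.ve
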
    Level: advanced

.keywords: PC, GASM, set, total, global, subdomains, additive Schwarz

.seealso: PCGASMSetLocalSubdomains(), PCGASMSetOverlap(), PCGASMGetSubKSP(),
          PCGASMCreateSubdomains2D()
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCGASMSetTotalSubdomains(PC pc,PetscInt N)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGASMSetTotalSubdomains_C",(PC,PetscInt),(pc,N));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGASMSetOverlap"
/*@
    PCGASMSetOverlap - Sets the overlap between a pair of subdomains for the
    additive Schwarz preconditioner.  Either all or no processors in the
    PC communicator must call this routine.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)

    Options Database Key:
.   -pc_gasm_overlap <ovl> - Sets overlap

    Notes:
    By default the GASM preconditioner uses 1 block per processor.  To use
    multiple blocks per processor, see PCGASMSetTotalSubdomains() and
    PCGASMSetLocalSubdomains() (and the option -pc_gasm_blocks <blks>).

    The overlap defaults to 1, so if one desires that no additional
    overlap be computed beyond what may have been set with a call to
    PCGASMSetTotalSubdomains() or PCGASMSetLocalSubdomains(), then ovl
    must be set to 0.  In particular, if one does not explicitly set
    the subdomains in an application code, then all overlap would be computed
    internally by PETSc, and using an overlap of 0 would result in a GASM
    variant that is equivalent to the block Jacobi preconditioner.

    Note that one can define initial index sets with any overlap via
    PCGASMSetTotalSubdomains() or PCGASMSetLocalSubdomains(); the routine
    PCGASMSetOverlap() merely allows PETSc to extend that overlap further
    if desired.

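    Example:
    A sketch of extending the PETSc-generated subdomains by two levels of overlap;
    with ovl = 0 instead (and automatically generated subdomains), the method
    reduces to block Jacobi:
.vb
    ierr = PCSetType(pc,PCGASM);CHKERRQ(ierr);
    ierr = PCGASMSetOverlap(pc,2);CHKERRQ(ierr);
.ve
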
    Level: intermediate

.keywords: PC, GASM, set, overlap

.seealso: PCGASMSetTotalSubdomains(), PCGASMSetLocalSubdomains(), PCGASMGetSubKSP(),
          PCGASMCreateSubdomains2D(), PCGASMGetLocalSubdomains()
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCGASMSetOverlap(PC pc,PetscInt ovl)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveInt(pc,ovl,2);
  ierr = PetscTryMethod(pc,"PCGASMSetOverlap_C",(PC,PetscInt),(pc,ovl));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGASMSetType"
/*@
    PCGASMSetType - Sets the type of restriction and interpolation used
    for local problems in the additive Schwarz method.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   type - variant of GASM, one of
.vb
      PC_GASM_BASIC       - full interpolation and restriction
      PC_GASM_RESTRICT    - full restriction, local processor interpolation
      PC_GASM_INTERPOLATE - full interpolation, local processor restriction
      PC_GASM_NONE        - local processor restriction and interpolation
.ve

    Options Database Key:
.   -pc_gasm_type [basic,restrict,interpolate,none] - Sets GASM type

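    Example:
    A sketch of selecting the restricted additive Schwarz variant programmatically,
    equivalent to the option -pc_gasm_type restrict:
.vb
    ierr = PCGASMSetType(pc,PC_GASM_RESTRICT);CHKERRQ(ierr);
.ve
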
    Level: intermediate

.keywords: PC, GASM, set, type

.seealso: PCGASMSetTotalSubdomains(), PCGASMSetLocalSubdomains(), PCGASMGetSubKSP(),
          PCGASMCreateSubdomains2D()
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCGASMSetType(PC pc,PCGASMType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveEnum(pc,type,2);
  ierr = PetscTryMethod(pc,"PCGASMSetType_C",(PC,PCGASMType),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGASMSetSortIndices"
/*@
    PCGASMSetSortIndices - Determines whether subdomain indices are sorted.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   doSort - sort the subdomain indices

    Level: intermediate

.keywords: PC, GASM, set, type

.seealso: PCGASMSetLocalSubdomains(), PCGASMSetTotalSubdomains(), PCGASMGetSubKSP(),
          PCGASMCreateSubdomains2D()
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCGASMSetSortIndices(PC pc,PetscBool doSort)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveBool(pc,doSort,2);
  ierr = PetscTryMethod(pc,"PCGASMSetSortIndices_C",(PC,PetscBool),(pc,doSort));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGASMGetSubKSP"
/*@C
   PCGASMGetSubKSP - Gets the local KSP contexts for all blocks on
   this processor.

   Collective on PC iff first_local is requested

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  n_local - the number of blocks on this processor, or PETSC_NULL
.  first_local - the global number of the first block on this processor, or PETSC_NULL;
                 all processors must request it or all must pass PETSC_NULL
-  ksp - the array of KSP contexts

   Notes:
   After PCGASMGetSubKSP() the array of KSPs is not to be freed.

   Currently for some matrix implementations only 1 block per processor
   is supported.

   You must call KSPSetUp() before calling PCGASMGetSubKSP().

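   Example:
   A sketch of retrieving the local blocks after KSPSetUp() and selecting ILU as
   the subdomain preconditioner on each; nlocal, first, and i are assumed to be
   PetscInt variables declared by the caller:
.vb
   PetscInt nlocal,first,i;
   KSP      *subksp;
   PC       subpc;
   ierr = PCGASMGetSubKSP(pc,&nlocal,&first,&subksp);CHKERRQ(ierr);
   for (i=0; i<nlocal; i++) {
     ierr = KSPGetPC(subksp[i],&subpc);CHKERRQ(ierr);
     ierr = PCSetType(subpc,PCILU);CHKERRQ(ierr);
   }
.ve
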
   Level: advanced

.keywords: PC, GASM, additive Schwarz, get, sub, KSP, context

.seealso: PCGASMSetTotalSubdomains(), PCGASMSetLocalSubdomains(), PCGASMSetOverlap(),
          PCGASMCreateSubdomains2D()
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCGASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscUseMethod(pc,"PCGASMGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------------------*/
/*MC
   PCGASM - Use the (restricted) additive Schwarz method, in which each block is
           (approximately) solved with its own KSP object.

   Options Database Keys:
+  -pc_gasm_blocks <blks> - Sets total blocks
.  -pc_gasm_overlap <ovl> - Sets overlap
-  -pc_gasm_type [basic,restrict,interpolate,none] - Sets GASM type

     IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors, you
      will get different convergence rates due to the default option -pc_gasm_type restrict. Use
      -pc_gasm_type basic to get the standard additive Schwarz method.

   Notes: Each processor can have one or more blocks, but a block cannot be shared by more
     than one processor. Defaults to one block per processor.

     To set options on the solvers for all blocks, append -sub_ to all the KSP and PC
        options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly
        (see the example below).

     To set the options on the solvers separately for each block, call PCGASMGetSubKSP()
         and set the options directly on the resulting KSP object (you can access its PC
         with KSPGetPC()).

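   Example:
   A sketch of a typical command line using GASM with 4 blocks in total, overlap 1, and ILU(1)
   as the subdomain solver (./app is a stand-in for your application):
.vb
   mpiexec -n 2 ./app -ksp_type gmres -pc_type gasm -pc_gasm_blocks 4 \
           -pc_gasm_overlap 1 -sub_pc_type ilu -sub_pc_factor_levels 1
.ve
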

   Level: beginner

   Concepts: additive Schwarz method

   References:
   M. Dryja and O. B. Widlund, "An additive variant of the Schwarz alternating method for the
   case of many subregions", Courant Institute, New York University, Technical Report.

   Barry Smith, Petter Bjorstad, and William Gropp, "Domain Decomposition: Parallel Multilevel
   Methods for Elliptic Partial Differential Equations", Cambridge University Press, ISBN 0-521-49589-X.

.seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
           PCBJACOBI, PCGASMGetSubKSP(), PCGASMSetLocalSubdomains(),
           PCGASMSetTotalSubdomains(), PCSetModifySubmatrices(), PCGASMSetOverlap(), PCGASMSetType()

M*/

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCCreate_GASM"
PetscErrorCode PETSCKSP_DLLEXPORT PCCreate_GASM(PC pc)
{
  PetscErrorCode ierr;
  PC_GASM        *osm;

  PetscFunctionBegin;
  ierr = PetscNewLog(pc,PC_GASM,&osm);CHKERRQ(ierr);
  osm->N                 = PETSC_DECIDE;
  osm->n                 = 0;
  osm->nmax              = 0;
  osm->overlap           = 1;
  osm->ksp               = 0;
  osm->grestriction      = 0;
  osm->gprolongation     = 0;
  osm->gx                = 0;
  osm->gy                = 0;
  osm->x                 = 0;
  osm->y                 = 0;
  osm->is                = 0;
  osm->is_local          = 0;
  osm->pmat              = 0;
  osm->type              = PC_GASM_RESTRICT;
  osm->same_local_solves = PETSC_TRUE;
  osm->sort_indices      = PETSC_TRUE;

  pc->data                 = (void*)osm;
  pc->ops->apply           = PCApply_GASM;
  pc->ops->applytranspose  = PCApplyTranspose_GASM;
  pc->ops->setup           = PCSetUp_GASM;
  pc->ops->destroy         = PCDestroy_GASM;
  pc->ops->setfromoptions  = PCSetFromOptions_GASM;
  pc->ops->setuponblocks   = PCSetUpOnBlocks_GASM;
  pc->ops->view            = PCView_GASM;
  pc->ops->applyrichardson = 0;

  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMSetLocalSubdomains_C","PCGASMSetLocalSubdomains_GASM",
                    PCGASMSetLocalSubdomains_GASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMSetTotalSubdomains_C","PCGASMSetTotalSubdomains_GASM",
                    PCGASMSetTotalSubdomains_GASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMSetOverlap_C","PCGASMSetOverlap_GASM",
                    PCGASMSetOverlap_GASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMSetType_C","PCGASMSetType_GASM",
                    PCGASMSetType_GASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMSetSortIndices_C","PCGASMSetSortIndices_GASM",
                    PCGASMSetSortIndices_GASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMGetSubKSP_C","PCGASMGetSubKSP_GASM",
                    PCGASMGetSubKSP_GASM);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCGASMCreateSubdomains"
/*@C
   PCGASMCreateSubdomains - Creates the index sets for the overlapping Schwarz
   preconditioner for any problem on a general grid.

   Collective

   Input Parameters:
+  A - The global matrix operator
-  n - the number of local blocks

   Output Parameters:
.  outis - the array of index sets defining the subdomains

   Level: advanced

   Note: this generates nonoverlapping subdomains; PCGASM will generate the overlap
    from these if you use PCGASMSetLocalSubdomains()

    In the Fortran version you must provide the array outis[] already allocated of length n.

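   Example:
   A sketch of creating two nonoverlapping subdomains per process from a matrix A and
   handing them to the preconditioner (they are later freed with PCGASMDestroySubdomains()):
.vb
   IS *is;
   ierr = PCGASMCreateSubdomains(A,2,&is);CHKERRQ(ierr);
   ierr = PCGASMSetLocalSubdomains(pc,2,is,PETSC_NULL);CHKERRQ(ierr);
.ve
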
.keywords: PC, GASM, additive Schwarz, create, subdomains, unstructured grid

.seealso: PCGASMSetLocalSubdomains(), PCGASMDestroySubdomains()
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCGASMCreateSubdomains(Mat A, PetscInt n, IS* outis[])
{
  MatPartitioning           mpart;
  const char                *prefix;
  PetscErrorCode            (*f)(Mat,PetscBool *,MatReuse,Mat*);
  PetscMPIInt               size;
  PetscInt                  i,j,rstart,rend,bs;
  PetscBool                 iscopy = PETSC_FALSE,isbaij = PETSC_FALSE,foundpart = PETSC_FALSE;
  Mat                       Ad = PETSC_NULL, adj;
  IS                        ispart,isnumb,*is;
  PetscErrorCode            ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidPointer(outis,3);
  if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of local blocks must be > 0, n = %D",n);

  /* Get prefix, row distribution, and block size */
  ierr = MatGetOptionsPrefix(A,&prefix);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
  ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
  if (rstart/bs*bs != rstart || rend/bs*bs != rend) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"bad row distribution [%D,%D) for matrix block size %D",rstart,rend,bs);

  /* Get diagonal block from matrix if possible */
  ierr = MPI_Comm_size(((PetscObject)A)->comm,&size);CHKERRQ(ierr);
  ierr = PetscObjectQueryFunction((PetscObject)A,"MatGetDiagonalBlock_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(A,&iscopy,MAT_INITIAL_MATRIX,&Ad);CHKERRQ(ierr);
  } else if (size == 1) {
    iscopy = PETSC_FALSE; Ad = A;
  } else {
    iscopy = PETSC_FALSE; Ad = PETSC_NULL;
  }
  if (Ad) {
    ierr = PetscTypeCompare((PetscObject)Ad,MATSEQBAIJ,&isbaij);CHKERRQ(ierr);
    if (!isbaij) {ierr = PetscTypeCompare((PetscObject)Ad,MATSEQSBAIJ,&isbaij);CHKERRQ(ierr);}
  }
  if (Ad && n > 1) {
    PetscBool match,done;
    /* Try to setup a good matrix partitioning if available */
    ierr = MatPartitioningCreate(PETSC_COMM_SELF,&mpart);CHKERRQ(ierr);
    ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
    ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr);
    ierr = PetscTypeCompare((PetscObject)mpart,MATPARTITIONINGCURRENT,&match);CHKERRQ(ierr);
    if (!match) {
      ierr = PetscTypeCompare((PetscObject)mpart,MATPARTITIONINGSQUARE,&match);CHKERRQ(ierr);
    }
    if (!match) { /* assume a "good" partitioner is available */
      PetscInt na,*ia,*ja;
      ierr = MatGetRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
      if (done) {
        /* Build adjacency matrix by hand. Unfortunately a call to
           MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
           remove the block-aij structure and we cannot expect
           MatPartitioning to split vertices as we need */
        PetscInt i,j,*row,len,nnz,cnt,*iia=0,*jja=0;
        nnz = 0;
        for (i=0; i<na; i++) { /* count number of nonzeros */
          len = ia[i+1] - ia[i];
          row = ja + ia[i];
          for (j=0; j<len; j++) {
            if (row[j] == i) { /* don't count diagonal */
              len--; break;
            }
          }
          nnz += len;
        }
        ierr = PetscMalloc((na+1)*sizeof(PetscInt),&iia);CHKERRQ(ierr);
        ierr = PetscMalloc((nnz)*sizeof(PetscInt),&jja);CHKERRQ(ierr);
        nnz    = 0;
        iia[0] = 0;
        for (i=0; i<na; i++) { /* fill adjacency */
          cnt = 0;
          len = ia[i+1] - ia[i];
          row = ja + ia[i];
          for (j=0; j<len; j++) {
            if (row[j] != i) { /* if not diagonal */
              jja[nnz+cnt++] = row[j];
            }
          }
          nnz += cnt;
          iia[i+1] = nnz;
        }
        /* Partitioning of the adjacency matrix */
        ierr = MatCreateMPIAdj(PETSC_COMM_SELF,na,na,iia,jja,PETSC_NULL,&adj);CHKERRQ(ierr);
        ierr = MatPartitioningSetAdjacency(mpart,adj);CHKERRQ(ierr);
        ierr = MatPartitioningSetNParts(mpart,n);CHKERRQ(ierr);
        ierr = MatPartitioningApply(mpart,&ispart);CHKERRQ(ierr);
        ierr = ISPartitioningToNumbering(ispart,&isnumb);CHKERRQ(ierr);
        ierr = MatDestroy(adj);CHKERRQ(ierr);
        foundpart = PETSC_TRUE;
      }
      ierr = MatRestoreRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
    }
    ierr = MatPartitioningDestroy(mpart);CHKERRQ(ierr);
  }
  if (iscopy) {ierr = MatDestroy(Ad);CHKERRQ(ierr);}

  ierr = PetscMalloc(n*sizeof(IS),&is);CHKERRQ(ierr);
  *outis = is;

  if (!foundpart) {

    /* Partitioning by contiguous chunks of rows */

    PetscInt mbs   = (rend-rstart)/bs;
    PetscInt start = rstart;
    for (i=0; i<n; i++) {
      PetscInt count = (mbs/n + ((mbs % n) > i)) * bs;
      ierr   = ISCreateStride(PETSC_COMM_SELF,count,start,1,&is[i]);CHKERRQ(ierr);
      start += count;
    }

  } else {

    /* Partitioning by adjacency of diagonal block  */

    const PetscInt *numbering;
    PetscInt       *count,nidx,*indices,*newidx,start=0;
    /* Get node count in each partition */
    ierr = PetscMalloc(n*sizeof(PetscInt),&count);CHKERRQ(ierr);
    ierr = ISPartitioningCount(ispart,n,count);CHKERRQ(ierr);
    if (isbaij && bs > 1) { /* adjust for the block-aij case */
      for (i=0; i<n; i++) count[i] *= bs;
    }
    /* Build indices from node numbering */
    ierr = ISGetLocalSize(isnumb,&nidx);CHKERRQ(ierr);
    ierr = PetscMalloc(nidx*sizeof(PetscInt),&indices);CHKERRQ(ierr);
    for (i=0; i<nidx; i++) indices[i] = i; /* needs to be initialized */
    ierr = ISGetIndices(isnumb,&numbering);CHKERRQ(ierr);
    ierr = PetscSortIntWithPermutation(nidx,numbering,indices);CHKERRQ(ierr);
    ierr = ISRestoreIndices(isnumb,&numbering);CHKERRQ(ierr);
    if (isbaij && bs > 1) { /* adjust for the block-aij case */
      ierr = PetscMalloc(nidx*bs*sizeof(PetscInt),&newidx);CHKERRQ(ierr);
      for (i=0; i<nidx; i++)
        for (j=0; j<bs; j++)
          newidx[i*bs+j] = indices[i]*bs + j;
      ierr = PetscFree(indices);CHKERRQ(ierr);
      nidx   *= bs;
      indices = newidx;
    }
    /* Shift to get global indices */
    for (i=0; i<nidx; i++) indices[i] += rstart;

    /* Build the index sets for each block */
    for (i=0; i<n; i++) {
      ierr   = ISCreateGeneral(PETSC_COMM_SELF,count[i],&indices[start],PETSC_COPY_VALUES,&is[i]);CHKERRQ(ierr);
      ierr   = ISSort(is[i]);CHKERRQ(ierr);
      start += count[i];
    }

    ierr = PetscFree(count);CHKERRQ(ierr);
    ierr = PetscFree(indices);CHKERRQ(ierr);
    ierr = ISDestroy(isnumb);CHKERRQ(ierr);
    ierr = ISDestroy(ispart);CHKERRQ(ierr);

  }

  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGASMDestroySubdomains"
/*@C
   PCGASMDestroySubdomains - Destroys the index sets created with
   PCGASMCreateSubdomains(). Should be called after setting the subdomains
   with PCGASMSetLocalSubdomains().

   Collective

   Input Parameters:
+  n - the number of index sets
.  is - the array of index sets
-  is_local - the array of local index sets, can be PETSC_NULL

   Level: advanced

.keywords: PC, GASM, additive Schwarz, create, subdomains, unstructured grid

.seealso: PCGASMCreateSubdomains(), PCGASMSetLocalSubdomains()
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCGASMDestroySubdomains(PetscInt n, IS is[], IS is_local[])
{
  PetscInt       i;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (n <= 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"n must be > 0: n = %D",n);
  PetscValidPointer(is,2);
  for (i=0; i<n; i++) { ierr = ISDestroy(is[i]);CHKERRQ(ierr); }
  ierr = PetscFree(is);CHKERRQ(ierr);
  if (is_local) {
    PetscValidPointer(is_local,3);
    for (i=0; i<n; i++) { ierr = ISDestroy(is_local[i]);CHKERRQ(ierr); }
    ierr = PetscFree(is_local);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGASMCreateSubdomains2D"
/*@
   PCGASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
   preconditioner for a two-dimensional problem on a regular grid.

   Not Collective

   Input Parameters:
+  m, n - the number of mesh points in the x and y directions
.  M, N - the number of subdomains in the x and y directions
.  dof - degrees of freedom per node
-  overlap - overlap in mesh lines

   Output Parameters:
+  Nsub - the number of subdomains created
.  is - array of index sets defining overlapping (if overlap > 0) subdomains
-  is_local - array of index sets defining non-overlapping subdomains

   Note:
   Presently PCGASMCreateSubdomains2D() is valid only for sequential
   preconditioners.  More general related routines are
   PCGASMSetTotalSubdomains() and PCGASMSetLocalSubdomains().

   Level: advanced

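   Example:
   A sketch of tiling a 64x64 mesh (one degree of freedom per node) with a 4x4 array of
   subdomains that overlap by one mesh line:
.vb
   PetscInt Nsub;
   IS       *is,*is_local;
   ierr = PCGASMCreateSubdomains2D(64,64,4,4,1,1,&Nsub,&is,&is_local);CHKERRQ(ierr);
   ierr = PCGASMSetLocalSubdomains(pc,Nsub,is,is_local);CHKERRQ(ierr);
.ve
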
.keywords: PC, GASM, additive Schwarz, create, subdomains, 2D, regular grid

.seealso: PCGASMSetTotalSubdomains(), PCGASMSetLocalSubdomains(), PCGASMGetSubKSP(),
          PCGASMSetOverlap()
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCGASMCreateSubdomains2D(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt dof,PetscInt overlap,PetscInt *Nsub,IS **is,IS **is_local)
{
  PetscInt       i,j,height,width,ystart,xstart,yleft,yright,xleft,xright,loc_outer;
  PetscErrorCode ierr;
  PetscInt       nidx,*idx,loc,ii,jj,count;

  PetscFunctionBegin;
  if (dof != 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Presently only dof = 1 is supported");

  *Nsub = N*M;
  ierr = PetscMalloc((*Nsub)*sizeof(IS),is);CHKERRQ(ierr);
  ierr = PetscMalloc((*Nsub)*sizeof(IS),is_local);CHKERRQ(ierr);
  ystart = 0;
  loc_outer = 0;
  for (i=0; i<N; i++) {
    height = n/N + ((n % N) > i); /* height of subdomain */
    if (height < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many N subdomains for mesh dimension n");
    yleft  = ystart - overlap; if (yleft < 0) yleft = 0;
    yright = ystart + height + overlap; if (yright > n) yright = n;
    xstart = 0;
    for (j=0; j<M; j++) {
      width = m/M + ((m % M) > j); /* width of subdomain */
      if (width < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many M subdomains for mesh dimension m");
      xleft  = xstart - overlap; if (xleft < 0) xleft = 0;
      xright = xstart + width + overlap; if (xright > m) xright = m;
      nidx   = (xright - xleft)*(yright - yleft);
      ierr = PetscMalloc(nidx*sizeof(PetscInt),&idx);CHKERRQ(ierr);
      loc    = 0;
      for (ii=yleft; ii<yright; ii++) {
        count = m*ii + xleft;
        for (jj=xleft; jj<xright; jj++) {
          idx[loc++] = count++;
        }
      }
      ierr = ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,PETSC_COPY_VALUES,(*is)+loc_outer);CHKERRQ(ierr);
      if (overlap == 0) {
        ierr = PetscObjectReference((PetscObject)(*is)[loc_outer]);CHKERRQ(ierr);
        (*is_local)[loc_outer] = (*is)[loc_outer];
      } else {
        for (loc=0,ii=ystart; ii<ystart+height; ii++) {
          for (jj=xstart; jj<xstart+width; jj++) {
            idx[loc++] = m*ii + jj;
          }
        }
        ierr = ISCreateGeneral(PETSC_COMM_SELF,loc,idx,PETSC_COPY_VALUES,*is_local+loc_outer);CHKERRQ(ierr);
      }
      ierr = PetscFree(idx);CHKERRQ(ierr);
      xstart += width;
      loc_outer++;
    }
    ystart += height;
  }
  for (i=0; i<*Nsub; i++) { ierr = ISSort((*is)[i]);CHKERRQ(ierr); }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGASMGetLocalSubdomains"
/*@C
    PCGASMGetLocalSubdomains - Gets the local subdomains (for this processor
    only) for the additive Schwarz preconditioner.

    Not Collective

    Input Parameter:
.   pc - the preconditioner context

    Output Parameters:
+   n - the number of subdomains for this processor (default value = 1)
.   is - the index sets that define the subdomains for this processor
-   is_local - the index sets that define the local part of the subdomains for this processor (can be PETSC_NULL)

    Notes:
    The IS numbering is in the parallel, global numbering of the vector.

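    Example:
    A sketch of inspecting the overlapping subdomains after setup:
.vb
    PetscInt n,i;
    IS       *is,*is_local;
    ierr = PCGASMGetLocalSubdomains(pc,&n,&is,&is_local);CHKERRQ(ierr);
    for (i=0; i<n; i++) {
      ierr = ISView(is[i],PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr);
    }
.ve
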
    Level: advanced

.keywords: PC, GASM, set, local, subdomains, additive Schwarz

.seealso: PCGASMSetTotalSubdomains(), PCGASMSetOverlap(), PCGASMGetSubKSP(),
          PCGASMCreateSubdomains2D(), PCGASMSetLocalSubdomains(), PCGASMGetLocalSubmatrices()
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCGASMGetLocalSubdomains(PC pc,PetscInt *n,IS *is[],IS *is_local[])
{
  PC_GASM        *osm;
  PetscErrorCode ierr;
  PetscBool      match;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidIntPointer(n,2);
  if (is) PetscValidPointer(is,3);
  ierr = PetscTypeCompare((PetscObject)pc,PCGASM,&match);CHKERRQ(ierr);
  if (!match) {
    if (n)        *n        = 0;
    if (is)       *is       = PETSC_NULL;
    if (is_local) *is_local = PETSC_NULL;
  } else {
    osm = (PC_GASM*)pc->data;
    if (n)        *n        = osm->n;
    if (is)       *is       = osm->is;
    if (is_local) *is_local = osm->is_local;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGASMGetLocalSubmatrices"
/*@C
    PCGASMGetLocalSubmatrices - Gets the local submatrices (for this processor
    only) for the additive Schwarz preconditioner.

    Not Collective

    Input Parameter:
.   pc - the preconditioner context

    Output Parameters:
+   n - the number of matrices for this processor (default value = 1)
-   mat - the matrices

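    Example:
    A sketch of viewing each subdomain matrix after PCSetUp():
.vb
    PetscInt n,i;
    Mat      *mat;
    ierr = PCGASMGetLocalSubmatrices(pc,&n,&mat);CHKERRQ(ierr);
    for (i=0; i<n; i++) {
      ierr = MatView(mat[i],PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr);
    }
.ve
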
    Level: advanced

.keywords: PC, GASM, set, local, subdomains, additive Schwarz, block Jacobi

.seealso: PCGASMSetTotalSubdomains(), PCGASMSetOverlap(), PCGASMGetSubKSP(),
          PCGASMCreateSubdomains2D(), PCGASMSetLocalSubdomains(), PCGASMGetLocalSubdomains()
@*/
PetscErrorCode PETSCKSP_DLLEXPORT PCGASMGetLocalSubmatrices(PC pc,PetscInt *n,Mat *mat[])
{
  PC_GASM        *osm;
  PetscErrorCode ierr;
  PetscBool      match;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidIntPointer(n,2);
  if (mat) PetscValidPointer(mat,3);
  if (!pc->setupcalled) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUp() or PCSetUp().");
  ierr = PetscTypeCompare((PetscObject)pc,PCGASM,&match);CHKERRQ(ierr);
  if (!match) {
    if (n)   *n   = 0;
    if (mat) *mat = PETSC_NULL;
  } else {
    osm = (PC_GASM*)pc->data;
    if (n)   *n   = osm->n;
    if (mat) *mat = osm->pmat;
  }
  PetscFunctionReturn(0);
}