xref: /petsc/src/ksp/pc/impls/wb/wb.c (revision 40badf4fbc550ac1f60bd080eaff6de6d55b946d)
1 
2 #include <petscdmda.h>   /*I "petscdmda.h" I*/
3 #include <petsc/private/pcmgimpl.h>   /*I "petscksp.h" I*/
4 #include <petscctable.h>
5 
typedef struct {
  PCExoticType type;         /* which coarse space: PC_EXOTIC_FACE or PC_EXOTIC_WIREBASKET */
  Mat          P;            /* the constructed interpolation matrix */
  PetscBool    directSolve;  /* use direct LU factorization to construct interpolation */
  KSP          ksp;          /* inner solver used column-by-column when directSolve is false */
} PC_Exotic;

/* string names for PCExoticType; trailing entries are the enum type name and prefix used by PetscOptionsEnum() */
const char *const PCExoticTypes[] = {"face","wirebasket","PCExoticType","PC_Exotic",NULL};
14 
15 /*
16       DMDAGetWireBasketInterpolation - Gets the interpolation for a wirebasket based coarse space
17 
18 */
19 PetscErrorCode DMDAGetWireBasketInterpolation(PC pc,DM da,PC_Exotic *exotic,Mat Aglobal,MatReuse reuse,Mat *P)
20 {
21   PetscInt               dim,i,j,k,m,n,p,dof,Nint,Nface,Nwire,Nsurf,*Iint,*Isurf,cint = 0,csurf = 0,istart,jstart,kstart,*II,N,c = 0;
22   PetscInt               mwidth,nwidth,pwidth,cnt,mp,np,pp,Ntotal,gl[26],*globals,Ng,*IIint,*IIsurf,Nt;
23   Mat                    Xint, Xsurf,Xint_tmp;
24   IS                     isint,issurf,is,row,col;
25   ISLocalToGlobalMapping ltg;
26   MPI_Comm               comm;
27   Mat                    A,Aii,Ais,Asi,*Aholder,iAii;
28   MatFactorInfo          info;
29   PetscScalar            *xsurf,*xint;
30   const PetscScalar      *rxint;
31 #if defined(PETSC_USE_DEBUG_foo)
32   PetscScalar            tmp;
33 #endif
34   PetscTable             ht;
35 
36   PetscFunctionBegin;
37   CHKERRQ(DMDAGetInfo(da,&dim,NULL,NULL,NULL,&mp,&np,&pp,&dof,NULL,NULL,NULL,NULL,NULL));
38   PetscCheckFalse(dof != 1,PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Only for single field problems");
39   PetscCheckFalse(dim != 3,PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Only coded for 3d problems");
40   CHKERRQ(DMDAGetCorners(da,NULL,NULL,NULL,&m,&n,&p));
41   CHKERRQ(DMDAGetGhostCorners(da,&istart,&jstart,&kstart,&mwidth,&nwidth,&pwidth));
42   istart = istart ? -1 : 0;
43   jstart = jstart ? -1 : 0;
44   kstart = kstart ? -1 : 0;
45 
46   /*
47     the columns of P are the interpolation of each coarse grid point (one for each vertex and edge)
48     to all the local degrees of freedom (this includes the vertices, edges and faces).
49 
50     Xint are the subset of the interpolation into the interior
51 
52     Xface are the interpolation onto faces but not into the interior
53 
54     Xsurf are the interpolation onto the vertices and edges (the surfbasket)
55                                       Xint
56     Symbolically one could write P = (Xface) after interchanging the rows to match the natural ordering on the domain
57                                       Xsurf
58   */
59   N     = (m - istart)*(n - jstart)*(p - kstart);
60   Nint  = (m-2-istart)*(n-2-jstart)*(p-2-kstart);
61   Nface = 2*((m-2-istart)*(n-2-jstart) + (m-2-istart)*(p-2-kstart) + (n-2-jstart)*(p-2-kstart));
62   Nwire = 4*((m-2-istart) + (n-2-jstart) + (p-2-kstart)) + 8;
63   Nsurf = Nface + Nwire;
64   CHKERRQ(MatCreateSeqDense(MPI_COMM_SELF,Nint,26,NULL,&Xint));
65   CHKERRQ(MatCreateSeqDense(MPI_COMM_SELF,Nsurf,26,NULL,&Xsurf));
66   CHKERRQ(MatDenseGetArray(Xsurf,&xsurf));
67 
68   /*
69      Require that all 12 edges and 6 faces have at least one grid point. Otherwise some of the columns of
70      Xsurf will be all zero (thus making the coarse matrix singular).
71   */
72   PetscCheckFalse(m-istart < 3,PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of grid points per process in X direction must be at least 3");
73   PetscCheckFalse(n-jstart < 3,PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of grid points per process in Y direction must be at least 3");
74   PetscCheckFalse(p-kstart < 3,PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of grid points per process in Z direction must be at least 3");
75 
76   cnt = 0;
77 
78   xsurf[cnt++] = 1;
79   for (i=1; i<m-istart-1; i++) xsurf[cnt++ + Nsurf] = 1;
80   xsurf[cnt++ + 2*Nsurf] = 1;
81 
82   for (j=1; j<n-1-jstart; j++) {
83     xsurf[cnt++ + 3*Nsurf] = 1;
84     for (i=1; i<m-istart-1; i++) xsurf[cnt++ + 4*Nsurf] = 1;
85     xsurf[cnt++ + 5*Nsurf] = 1;
86   }
87 
88   xsurf[cnt++ + 6*Nsurf] = 1;
89   for (i=1; i<m-istart-1; i++) xsurf[cnt++ + 7*Nsurf] = 1;
90   xsurf[cnt++ + 8*Nsurf] = 1;
91 
92   for (k=1; k<p-1-kstart; k++) {
93     xsurf[cnt++ + 9*Nsurf] = 1;
94     for (i=1; i<m-istart-1; i++) xsurf[cnt++ + 10*Nsurf] = 1;
95     xsurf[cnt++ + 11*Nsurf] = 1;
96 
97     for (j=1; j<n-1-jstart; j++) {
98       xsurf[cnt++ + 12*Nsurf] = 1;
99       /* these are the interior nodes */
100       xsurf[cnt++ + 13*Nsurf] = 1;
101     }
102 
103     xsurf[cnt++ + 14*Nsurf] = 1;
104     for (i=1; i<m-istart-1; i++) xsurf[cnt++ + 15*Nsurf] = 1;
105     xsurf[cnt++ + 16*Nsurf] = 1;
106   }
107 
108   xsurf[cnt++ + 17*Nsurf] = 1;
109   for (i=1; i<m-istart-1; i++) xsurf[cnt++ + 18*Nsurf] = 1;
110   xsurf[cnt++ + 19*Nsurf] = 1;
111 
112   for (j=1;j<n-1-jstart;j++) {
113     xsurf[cnt++ + 20*Nsurf] = 1;
114     for (i=1; i<m-istart-1; i++) xsurf[cnt++ + 21*Nsurf] = 1;
115     xsurf[cnt++ + 22*Nsurf] = 1;
116   }
117 
118   xsurf[cnt++ + 23*Nsurf] = 1;
119   for (i=1; i<m-istart-1; i++) xsurf[cnt++ + 24*Nsurf] = 1;
120   xsurf[cnt++ + 25*Nsurf] = 1;
121 
122   /* interpolations only sum to 1 when using direct solver */
123 #if defined(PETSC_USE_DEBUG_foo)
124   for (i=0; i<Nsurf; i++) {
125     tmp = 0.0;
126     for (j=0; j<26; j++) tmp += xsurf[i+j*Nsurf];
127     PetscCheckFalse(PetscAbsScalar(tmp-1.0) > 1.e-10,PETSC_COMM_SELF,PETSC_ERR_PLIB,"Wrong Xsurf interpolation at i %D value %g",i,(double)PetscAbsScalar(tmp));
128   }
129 #endif
130   CHKERRQ(MatDenseRestoreArray(Xsurf,&xsurf));
131   /* CHKERRQ(MatView(Xsurf,PETSC_VIEWER_STDOUT_WORLD));*/
132 
133   /*
134        I are the indices for all the needed vertices (in global numbering)
135        Iint are the indices for the interior values, I surf for the surface values
136             (This is just for the part of the global matrix obtained with MatCreateSubMatrix(), it
137              is NOT the local DMDA ordering.)
138        IIint and IIsurf are the same as the Iint, Isurf except they are in the global numbering
139   */
140 #define Endpoint(a,start,b) (a == 0 || a == (b-1-start))
141   CHKERRQ(PetscMalloc3(N,&II,Nint,&Iint,Nsurf,&Isurf));
142   CHKERRQ(PetscMalloc2(Nint,&IIint,Nsurf,&IIsurf));
143   for (k=0; k<p-kstart; k++) {
144     for (j=0; j<n-jstart; j++) {
145       for (i=0; i<m-istart; i++) {
146         II[c++] = i + j*mwidth + k*mwidth*nwidth;
147 
148         if (!Endpoint(i,istart,m) && !Endpoint(j,jstart,n) && !Endpoint(k,kstart,p)) {
149           IIint[cint]  = i + j*mwidth + k*mwidth*nwidth;
150           Iint[cint++] = i + j*(m-istart) + k*(m-istart)*(n-jstart);
151         } else {
152           IIsurf[csurf]  = i + j*mwidth + k*mwidth*nwidth;
153           Isurf[csurf++] = i + j*(m-istart) + k*(m-istart)*(n-jstart);
154         }
155       }
156     }
157   }
158   PetscCheckFalse(c != N,PETSC_COMM_SELF,PETSC_ERR_PLIB,"c != N");
159   PetscCheckFalse(cint != Nint,PETSC_COMM_SELF,PETSC_ERR_PLIB,"cint != Nint");
160   PetscCheckFalse(csurf != Nsurf,PETSC_COMM_SELF,PETSC_ERR_PLIB,"csurf != Nsurf");
161   CHKERRQ(DMGetLocalToGlobalMapping(da,&ltg));
162   CHKERRQ(ISLocalToGlobalMappingApply(ltg,N,II,II));
163   CHKERRQ(ISLocalToGlobalMappingApply(ltg,Nint,IIint,IIint));
164   CHKERRQ(ISLocalToGlobalMappingApply(ltg,Nsurf,IIsurf,IIsurf));
165   CHKERRQ(PetscObjectGetComm((PetscObject)da,&comm));
166   CHKERRQ(ISCreateGeneral(comm,N,II,PETSC_COPY_VALUES,&is));
167   CHKERRQ(ISCreateGeneral(PETSC_COMM_SELF,Nint,Iint,PETSC_COPY_VALUES,&isint));
168   CHKERRQ(ISCreateGeneral(PETSC_COMM_SELF,Nsurf,Isurf,PETSC_COPY_VALUES,&issurf));
169   CHKERRQ(PetscFree3(II,Iint,Isurf));
170 
171   CHKERRQ(MatCreateSubMatrices(Aglobal,1,&is,&is,MAT_INITIAL_MATRIX,&Aholder));
172   A    = *Aholder;
173   CHKERRQ(PetscFree(Aholder));
174 
175   CHKERRQ(MatCreateSubMatrix(A,isint,isint,MAT_INITIAL_MATRIX,&Aii));
176   CHKERRQ(MatCreateSubMatrix(A,isint,issurf,MAT_INITIAL_MATRIX,&Ais));
177   CHKERRQ(MatCreateSubMatrix(A,issurf,isint,MAT_INITIAL_MATRIX,&Asi));
178 
179   /*
180      Solve for the interpolation onto the interior Xint
181   */
182   CHKERRQ(MatMatMult(Ais,Xsurf,MAT_INITIAL_MATRIX,PETSC_DETERMINE,&Xint_tmp));
183   CHKERRQ(MatScale(Xint_tmp,-1.0));
184   if (exotic->directSolve) {
185     CHKERRQ(MatGetFactor(Aii,MATSOLVERPETSC,MAT_FACTOR_LU,&iAii));
186     CHKERRQ(MatFactorInfoInitialize(&info));
187     CHKERRQ(MatGetOrdering(Aii,MATORDERINGND,&row,&col));
188     CHKERRQ(MatLUFactorSymbolic(iAii,Aii,row,col,&info));
189     CHKERRQ(ISDestroy(&row));
190     CHKERRQ(ISDestroy(&col));
191     CHKERRQ(MatLUFactorNumeric(iAii,Aii,&info));
192     CHKERRQ(MatMatSolve(iAii,Xint_tmp,Xint));
193     CHKERRQ(MatDestroy(&iAii));
194   } else {
195     Vec         b,x;
196     PetscScalar *xint_tmp;
197 
198     CHKERRQ(MatDenseGetArray(Xint,&xint));
199     CHKERRQ(VecCreateSeqWithArray(PETSC_COMM_SELF,1,Nint,NULL,&x));
200     CHKERRQ(MatDenseGetArray(Xint_tmp,&xint_tmp));
201     CHKERRQ(VecCreateSeqWithArray(PETSC_COMM_SELF,1,Nint,NULL,&b));
202     CHKERRQ(KSPSetOperators(exotic->ksp,Aii,Aii));
203     for (i=0; i<26; i++) {
204       CHKERRQ(VecPlaceArray(x,xint+i*Nint));
205       CHKERRQ(VecPlaceArray(b,xint_tmp+i*Nint));
206       CHKERRQ(KSPSolve(exotic->ksp,b,x));
207       CHKERRQ(KSPCheckSolve(exotic->ksp,pc,x));
208       CHKERRQ(VecResetArray(x));
209       CHKERRQ(VecResetArray(b));
210     }
211     CHKERRQ(MatDenseRestoreArray(Xint,&xint));
212     CHKERRQ(MatDenseRestoreArray(Xint_tmp,&xint_tmp));
213     CHKERRQ(VecDestroy(&x));
214     CHKERRQ(VecDestroy(&b));
215   }
216   CHKERRQ(MatDestroy(&Xint_tmp));
217 
218 #if defined(PETSC_USE_DEBUG_foo)
219   CHKERRQ(MatDenseGetArrayRead(Xint,&rxint));
220   for (i=0; i<Nint; i++) {
221     tmp = 0.0;
222     for (j=0; j<26; j++) tmp += rxint[i+j*Nint];
223 
224     PetscCheckFalse(PetscAbsScalar(tmp-1.0) > 1.e-10,PETSC_COMM_SELF,PETSC_ERR_PLIB,"Wrong Xint interpolation at i %D value %g",i,(double)PetscAbsScalar(tmp));
225   }
226   CHKERRQ(MatDenseRestoreArrayRead(Xint,&rxint));
227   /* CHKERRQ(MatView(Xint,PETSC_VIEWER_STDOUT_WORLD)); */
228 #endif
229 
230   /*         total vertices             total faces                                  total edges */
231   Ntotal = (mp + 1)*(np + 1)*(pp + 1) + mp*np*(pp+1) + mp*pp*(np+1) + np*pp*(mp+1) + mp*(np+1)*(pp+1) + np*(mp+1)*(pp+1) +  pp*(mp+1)*(np+1);
232 
233   /*
234       For each vertex, edge, face on process (in the same orderings as used above) determine its local number including ghost points
235   */
236   cnt = 0;
237 
238   gl[cnt++] = 0;  { gl[cnt++] = 1;} gl[cnt++] = m-istart-1;
239   { gl[cnt++] = mwidth;  { gl[cnt++] = mwidth+1;} gl[cnt++] = mwidth + m-istart-1;}
240   gl[cnt++] = mwidth*(n-jstart-1);  { gl[cnt++] = mwidth*(n-jstart-1)+1;} gl[cnt++] = mwidth*(n-jstart-1) + m-istart-1;
241   {
242     gl[cnt++] = mwidth*nwidth;  { gl[cnt++] = mwidth*nwidth+1;}  gl[cnt++] = mwidth*nwidth+ m-istart-1;
243     { gl[cnt++] = mwidth*nwidth + mwidth; /* these are the interior nodes */ gl[cnt++] = mwidth*nwidth + mwidth+m-istart-1;}
244     gl[cnt++] = mwidth*nwidth+ mwidth*(n-jstart-1);   { gl[cnt++] = mwidth*nwidth+mwidth*(n-jstart-1)+1;} gl[cnt++] = mwidth*nwidth+mwidth*(n-jstart-1) + m-istart-1;
245   }
246   gl[cnt++] = mwidth*nwidth*(p-kstart-1); { gl[cnt++] = mwidth*nwidth*(p-kstart-1)+1;} gl[cnt++] = mwidth*nwidth*(p-kstart-1) +  m-istart-1;
247   { gl[cnt++] = mwidth*nwidth*(p-kstart-1) + mwidth;   { gl[cnt++] = mwidth*nwidth*(p-kstart-1) + mwidth+1;} gl[cnt++] = mwidth*nwidth*(p-kstart-1)+mwidth+m-istart-1;}
248   gl[cnt++] = mwidth*nwidth*(p-kstart-1) +  mwidth*(n-jstart-1);  { gl[cnt++] = mwidth*nwidth*(p-kstart-1)+ mwidth*(n-jstart-1)+1;} gl[cnt++] = mwidth*nwidth*(p-kstart-1) + mwidth*(n-jstart-1) + m-istart-1;
249 
250   /* PetscIntView(26,gl,PETSC_VIEWER_STDOUT_WORLD); */
251   /* convert that to global numbering and get them on all processes */
252   CHKERRQ(ISLocalToGlobalMappingApply(ltg,26,gl,gl));
253   /* PetscIntView(26,gl,PETSC_VIEWER_STDOUT_WORLD); */
254   CHKERRQ(PetscMalloc1(26*mp*np*pp,&globals));
255   CHKERRMPI(MPI_Allgather(gl,26,MPIU_INT,globals,26,MPIU_INT,PetscObjectComm((PetscObject)da)));
256 
257   /* Number the coarse grid points from 0 to Ntotal */
258   CHKERRQ(MatGetSize(Aglobal,&Nt,NULL));
259   CHKERRQ(PetscTableCreate(Ntotal/3,Nt+1,&ht));
260   for (i=0; i<26*mp*np*pp; i++) {
261     CHKERRQ(PetscTableAddCount(ht,globals[i]+1));
262   }
263   CHKERRQ(PetscTableGetCount(ht,&cnt));
264   PetscCheckFalse(cnt != Ntotal,PETSC_COMM_SELF,PETSC_ERR_PLIB,"Hash table size %D not equal to total number coarse grid points %D",cnt,Ntotal);
265   CHKERRQ(PetscFree(globals));
266   for (i=0; i<26; i++) {
267     CHKERRQ(PetscTableFind(ht,gl[i]+1,&gl[i]));
268     gl[i]--;
269   }
270   CHKERRQ(PetscTableDestroy(&ht));
271   /* PetscIntView(26,gl,PETSC_VIEWER_STDOUT_WORLD); */
272 
273   /* construct global interpolation matrix */
274   CHKERRQ(MatGetLocalSize(Aglobal,&Ng,NULL));
275   if (reuse == MAT_INITIAL_MATRIX) {
276     CHKERRQ(MatCreateAIJ(PetscObjectComm((PetscObject)da),Ng,PETSC_DECIDE,PETSC_DECIDE,Ntotal,Nint+Nsurf,NULL,Nint+Nsurf,NULL,P));
277   } else {
278     CHKERRQ(MatZeroEntries(*P));
279   }
280   CHKERRQ(MatSetOption(*P,MAT_ROW_ORIENTED,PETSC_FALSE));
281   CHKERRQ(MatDenseGetArrayRead(Xint,&rxint));
282   CHKERRQ(MatSetValues(*P,Nint,IIint,26,gl,rxint,INSERT_VALUES));
283   CHKERRQ(MatDenseRestoreArrayRead(Xint,&rxint));
284   CHKERRQ(MatDenseGetArrayRead(Xsurf,&rxint));
285   CHKERRQ(MatSetValues(*P,Nsurf,IIsurf,26,gl,rxint,INSERT_VALUES));
286   CHKERRQ(MatDenseRestoreArrayRead(Xsurf,&rxint));
287   CHKERRQ(MatAssemblyBegin(*P,MAT_FINAL_ASSEMBLY));
288   CHKERRQ(MatAssemblyEnd(*P,MAT_FINAL_ASSEMBLY));
289   CHKERRQ(PetscFree2(IIint,IIsurf));
290 
291 #if defined(PETSC_USE_DEBUG_foo)
292   {
293     Vec         x,y;
294     PetscScalar *yy;
295     CHKERRQ(VecCreateMPI(PetscObjectComm((PetscObject)da),Ng,PETSC_DETERMINE,&y));
296     CHKERRQ(VecCreateMPI(PetscObjectComm((PetscObject)da),PETSC_DETERMINE,Ntotal,&x));
297     CHKERRQ(VecSet(x,1.0));
298     CHKERRQ(MatMult(*P,x,y));
299     CHKERRQ(VecGetArray(y,&yy));
300     for (i=0; i<Ng; i++) {
301       PetscCheckFalse(PetscAbsScalar(yy[i]-1.0) > 1.e-10,PETSC_COMM_SELF,PETSC_ERR_PLIB,"Wrong p interpolation at i %D value %g",i,(double)PetscAbsScalar(yy[i]));
302     }
303     CHKERRQ(VecRestoreArray(y,&yy));
304     CHKERRQ(VecDestroy(x));
305     CHKERRQ(VecDestroy(y));
306   }
307 #endif
308 
309   CHKERRQ(MatDestroy(&Aii));
310   CHKERRQ(MatDestroy(&Ais));
311   CHKERRQ(MatDestroy(&Asi));
312   CHKERRQ(MatDestroy(&A));
313   CHKERRQ(ISDestroy(&is));
314   CHKERRQ(ISDestroy(&isint));
315   CHKERRQ(ISDestroy(&issurf));
316   CHKERRQ(MatDestroy(&Xint));
317   CHKERRQ(MatDestroy(&Xsurf));
318   PetscFunctionReturn(0);
319 }
320 
321 /*
322       DMDAGetFaceInterpolation - Gets the interpolation for a face based coarse space
323 
324 */
325 PetscErrorCode DMDAGetFaceInterpolation(PC pc,DM da,PC_Exotic *exotic,Mat Aglobal,MatReuse reuse,Mat *P)
326 {
327   PetscInt               dim,i,j,k,m,n,p,dof,Nint,Nface,Nwire,Nsurf,*Iint,*Isurf,cint = 0,csurf = 0,istart,jstart,kstart,*II,N,c = 0;
328   PetscInt               mwidth,nwidth,pwidth,cnt,mp,np,pp,Ntotal,gl[6],*globals,Ng,*IIint,*IIsurf,Nt;
329   Mat                    Xint, Xsurf,Xint_tmp;
330   IS                     isint,issurf,is,row,col;
331   ISLocalToGlobalMapping ltg;
332   MPI_Comm               comm;
333   Mat                    A,Aii,Ais,Asi,*Aholder,iAii;
334   MatFactorInfo          info;
335   PetscScalar            *xsurf,*xint;
336   const PetscScalar      *rxint;
337 #if defined(PETSC_USE_DEBUG_foo)
338   PetscScalar            tmp;
339 #endif
340   PetscTable             ht;
341 
342   PetscFunctionBegin;
343   CHKERRQ(DMDAGetInfo(da,&dim,NULL,NULL,NULL,&mp,&np,&pp,&dof,NULL,NULL,NULL,NULL,NULL));
344   PetscCheckFalse(dof != 1,PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Only for single field problems");
345   PetscCheckFalse(dim != 3,PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Only coded for 3d problems");
346   CHKERRQ(DMDAGetCorners(da,NULL,NULL,NULL,&m,&n,&p));
347   CHKERRQ(DMDAGetGhostCorners(da,&istart,&jstart,&kstart,&mwidth,&nwidth,&pwidth));
348   istart = istart ? -1 : 0;
349   jstart = jstart ? -1 : 0;
350   kstart = kstart ? -1 : 0;
351 
352   /*
353     the columns of P are the interpolation of each coarse grid point (one for each vertex and edge)
354     to all the local degrees of freedom (this includes the vertices, edges and faces).
355 
356     Xint are the subset of the interpolation into the interior
357 
358     Xface are the interpolation onto faces but not into the interior
359 
360     Xsurf are the interpolation onto the vertices and edges (the surfbasket)
361                                       Xint
362     Symbolically one could write P = (Xface) after interchanging the rows to match the natural ordering on the domain
363                                       Xsurf
364   */
365   N     = (m - istart)*(n - jstart)*(p - kstart);
366   Nint  = (m-2-istart)*(n-2-jstart)*(p-2-kstart);
367   Nface = 2*((m-2-istart)*(n-2-jstart) + (m-2-istart)*(p-2-kstart) + (n-2-jstart)*(p-2-kstart));
368   Nwire = 4*((m-2-istart) + (n-2-jstart) + (p-2-kstart)) + 8;
369   Nsurf = Nface + Nwire;
370   CHKERRQ(MatCreateSeqDense(MPI_COMM_SELF,Nint,6,NULL,&Xint));
371   CHKERRQ(MatCreateSeqDense(MPI_COMM_SELF,Nsurf,6,NULL,&Xsurf));
372   CHKERRQ(MatDenseGetArray(Xsurf,&xsurf));
373 
374   /*
375      Require that all 12 edges and 6 faces have at least one grid point. Otherwise some of the columns of
376      Xsurf will be all zero (thus making the coarse matrix singular).
377   */
378   PetscCheckFalse(m-istart < 3,PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of grid points per process in X direction must be at least 3");
379   PetscCheckFalse(n-jstart < 3,PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of grid points per process in Y direction must be at least 3");
380   PetscCheckFalse(p-kstart < 3,PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of grid points per process in Z direction must be at least 3");
381 
382   cnt = 0;
383   for (j=1; j<n-1-jstart; j++) {
384     for (i=1; i<m-istart-1; i++) xsurf[cnt++ + 0*Nsurf] = 1;
385   }
386 
387   for (k=1; k<p-1-kstart; k++) {
388     for (i=1; i<m-istart-1; i++) xsurf[cnt++ + 1*Nsurf] = 1;
389     for (j=1; j<n-1-jstart; j++) {
390       xsurf[cnt++ + 2*Nsurf] = 1;
391       /* these are the interior nodes */
392       xsurf[cnt++ + 3*Nsurf] = 1;
393     }
394     for (i=1; i<m-istart-1; i++) xsurf[cnt++ + 4*Nsurf] = 1;
395   }
396   for (j=1;j<n-1-jstart;j++) {
397     for (i=1; i<m-istart-1; i++) xsurf[cnt++ + 5*Nsurf] = 1;
398   }
399 
400 #if defined(PETSC_USE_DEBUG_foo)
401   for (i=0; i<Nsurf; i++) {
402     tmp = 0.0;
403     for (j=0; j<6; j++) tmp += xsurf[i+j*Nsurf];
404 
405     PetscCheckFalse(PetscAbsScalar(tmp-1.0) > 1.e-10,PETSC_COMM_SELF,PETSC_ERR_PLIB,"Wrong Xsurf interpolation at i %D value %g",i,(double)PetscAbsScalar(tmp));
406   }
407 #endif
408   CHKERRQ(MatDenseRestoreArray(Xsurf,&xsurf));
409   /* CHKERRQ(MatView(Xsurf,PETSC_VIEWER_STDOUT_WORLD));*/
410 
411   /*
412        I are the indices for all the needed vertices (in global numbering)
413        Iint are the indices for the interior values, I surf for the surface values
414             (This is just for the part of the global matrix obtained with MatCreateSubMatrix(), it
415              is NOT the local DMDA ordering.)
416        IIint and IIsurf are the same as the Iint, Isurf except they are in the global numbering
417   */
418 #define Endpoint(a,start,b) (a == 0 || a == (b-1-start))
419   CHKERRQ(PetscMalloc3(N,&II,Nint,&Iint,Nsurf,&Isurf));
420   CHKERRQ(PetscMalloc2(Nint,&IIint,Nsurf,&IIsurf));
421   for (k=0; k<p-kstart; k++) {
422     for (j=0; j<n-jstart; j++) {
423       for (i=0; i<m-istart; i++) {
424         II[c++] = i + j*mwidth + k*mwidth*nwidth;
425 
426         if (!Endpoint(i,istart,m) && !Endpoint(j,jstart,n) && !Endpoint(k,kstart,p)) {
427           IIint[cint]  = i + j*mwidth + k*mwidth*nwidth;
428           Iint[cint++] = i + j*(m-istart) + k*(m-istart)*(n-jstart);
429         } else {
430           IIsurf[csurf]  = i + j*mwidth + k*mwidth*nwidth;
431           Isurf[csurf++] = i + j*(m-istart) + k*(m-istart)*(n-jstart);
432         }
433       }
434     }
435   }
436   PetscCheckFalse(c != N,PETSC_COMM_SELF,PETSC_ERR_PLIB,"c != N");
437   PetscCheckFalse(cint != Nint,PETSC_COMM_SELF,PETSC_ERR_PLIB,"cint != Nint");
438   PetscCheckFalse(csurf != Nsurf,PETSC_COMM_SELF,PETSC_ERR_PLIB,"csurf != Nsurf");
439   CHKERRQ(DMGetLocalToGlobalMapping(da,&ltg));
440   CHKERRQ(ISLocalToGlobalMappingApply(ltg,N,II,II));
441   CHKERRQ(ISLocalToGlobalMappingApply(ltg,Nint,IIint,IIint));
442   CHKERRQ(ISLocalToGlobalMappingApply(ltg,Nsurf,IIsurf,IIsurf));
443   CHKERRQ(PetscObjectGetComm((PetscObject)da,&comm));
444   CHKERRQ(ISCreateGeneral(comm,N,II,PETSC_COPY_VALUES,&is));
445   CHKERRQ(ISCreateGeneral(PETSC_COMM_SELF,Nint,Iint,PETSC_COPY_VALUES,&isint));
446   CHKERRQ(ISCreateGeneral(PETSC_COMM_SELF,Nsurf,Isurf,PETSC_COPY_VALUES,&issurf));
447   CHKERRQ(PetscFree3(II,Iint,Isurf));
448 
449   CHKERRQ(ISSort(is));
450   CHKERRQ(MatCreateSubMatrices(Aglobal,1,&is,&is,MAT_INITIAL_MATRIX,&Aholder));
451   A    = *Aholder;
452   CHKERRQ(PetscFree(Aholder));
453 
454   CHKERRQ(MatCreateSubMatrix(A,isint,isint,MAT_INITIAL_MATRIX,&Aii));
455   CHKERRQ(MatCreateSubMatrix(A,isint,issurf,MAT_INITIAL_MATRIX,&Ais));
456   CHKERRQ(MatCreateSubMatrix(A,issurf,isint,MAT_INITIAL_MATRIX,&Asi));
457 
458   /*
459      Solve for the interpolation onto the interior Xint
460   */
461   CHKERRQ(MatMatMult(Ais,Xsurf,MAT_INITIAL_MATRIX,PETSC_DETERMINE,&Xint_tmp));
462   CHKERRQ(MatScale(Xint_tmp,-1.0));
463 
464   if (exotic->directSolve) {
465     CHKERRQ(MatGetFactor(Aii,MATSOLVERPETSC,MAT_FACTOR_LU,&iAii));
466     CHKERRQ(MatFactorInfoInitialize(&info));
467     CHKERRQ(MatGetOrdering(Aii,MATORDERINGND,&row,&col));
468     CHKERRQ(MatLUFactorSymbolic(iAii,Aii,row,col,&info));
469     CHKERRQ(ISDestroy(&row));
470     CHKERRQ(ISDestroy(&col));
471     CHKERRQ(MatLUFactorNumeric(iAii,Aii,&info));
472     CHKERRQ(MatMatSolve(iAii,Xint_tmp,Xint));
473     CHKERRQ(MatDestroy(&iAii));
474   } else {
475     Vec         b,x;
476     PetscScalar *xint_tmp;
477 
478     CHKERRQ(MatDenseGetArray(Xint,&xint));
479     CHKERRQ(VecCreateSeqWithArray(PETSC_COMM_SELF,1,Nint,NULL,&x));
480     CHKERRQ(MatDenseGetArray(Xint_tmp,&xint_tmp));
481     CHKERRQ(VecCreateSeqWithArray(PETSC_COMM_SELF,1,Nint,NULL,&b));
482     CHKERRQ(KSPSetOperators(exotic->ksp,Aii,Aii));
483     for (i=0; i<6; i++) {
484       CHKERRQ(VecPlaceArray(x,xint+i*Nint));
485       CHKERRQ(VecPlaceArray(b,xint_tmp+i*Nint));
486       CHKERRQ(KSPSolve(exotic->ksp,b,x));
487       CHKERRQ(KSPCheckSolve(exotic->ksp,pc,x));
488       CHKERRQ(VecResetArray(x));
489       CHKERRQ(VecResetArray(b));
490     }
491     CHKERRQ(MatDenseRestoreArray(Xint,&xint));
492     CHKERRQ(MatDenseRestoreArray(Xint_tmp,&xint_tmp));
493     CHKERRQ(VecDestroy(&x));
494     CHKERRQ(VecDestroy(&b));
495   }
496   CHKERRQ(MatDestroy(&Xint_tmp));
497 
498 #if defined(PETSC_USE_DEBUG_foo)
499   CHKERRQ(MatDenseGetArrayRead(Xint,&rxint));
500   for (i=0; i<Nint; i++) {
501     tmp = 0.0;
502     for (j=0; j<6; j++) tmp += rxint[i+j*Nint];
503 
504     PetscCheckFalse(PetscAbsScalar(tmp-1.0) > 1.e-10,PETSC_COMM_SELF,PETSC_ERR_PLIB,"Wrong Xint interpolation at i %D value %g",i,(double)PetscAbsScalar(tmp));
505   }
506   CHKERRQ(MatDenseRestoreArrayRead(Xint,&rxint));
507   /* CHKERRQ(MatView(Xint,PETSC_VIEWER_STDOUT_WORLD)); */
508 #endif
509 
510   /*         total faces    */
511   Ntotal =  mp*np*(pp+1) + mp*pp*(np+1) + np*pp*(mp+1);
512 
513   /*
514       For each vertex, edge, face on process (in the same orderings as used above) determine its local number including ghost points
515   */
516   cnt = 0;
517   { gl[cnt++] = mwidth+1;}
518   {
519     { gl[cnt++] = mwidth*nwidth+1;}
520     { gl[cnt++] = mwidth*nwidth + mwidth; /* these are the interior nodes */ gl[cnt++] = mwidth*nwidth + mwidth+m-istart-1;}
521     { gl[cnt++] = mwidth*nwidth+mwidth*(n-jstart-1)+1;}
522   }
523   { gl[cnt++] = mwidth*nwidth*(p-kstart-1) + mwidth+1;}
524 
525   /* PetscIntView(6,gl,PETSC_VIEWER_STDOUT_WORLD); */
526   /* convert that to global numbering and get them on all processes */
527   CHKERRQ(ISLocalToGlobalMappingApply(ltg,6,gl,gl));
528   /* PetscIntView(6,gl,PETSC_VIEWER_STDOUT_WORLD); */
529   CHKERRQ(PetscMalloc1(6*mp*np*pp,&globals));
530   CHKERRMPI(MPI_Allgather(gl,6,MPIU_INT,globals,6,MPIU_INT,PetscObjectComm((PetscObject)da)));
531 
532   /* Number the coarse grid points from 0 to Ntotal */
533   CHKERRQ(MatGetSize(Aglobal,&Nt,NULL));
534   CHKERRQ(PetscTableCreate(Ntotal/3,Nt+1,&ht));
535   for (i=0; i<6*mp*np*pp; i++) {
536     CHKERRQ(PetscTableAddCount(ht,globals[i]+1));
537   }
538   CHKERRQ(PetscTableGetCount(ht,&cnt));
539   PetscCheckFalse(cnt != Ntotal,PETSC_COMM_SELF,PETSC_ERR_PLIB,"Hash table size %D not equal to total number coarse grid points %D",cnt,Ntotal);
540   CHKERRQ(PetscFree(globals));
541   for (i=0; i<6; i++) {
542     CHKERRQ(PetscTableFind(ht,gl[i]+1,&gl[i]));
543     gl[i]--;
544   }
545   CHKERRQ(PetscTableDestroy(&ht));
546   /* PetscIntView(6,gl,PETSC_VIEWER_STDOUT_WORLD); */
547 
548   /* construct global interpolation matrix */
549   CHKERRQ(MatGetLocalSize(Aglobal,&Ng,NULL));
550   if (reuse == MAT_INITIAL_MATRIX) {
551     CHKERRQ(MatCreateAIJ(PetscObjectComm((PetscObject)da),Ng,PETSC_DECIDE,PETSC_DECIDE,Ntotal,Nint+Nsurf,NULL,Nint,NULL,P));
552   } else {
553     CHKERRQ(MatZeroEntries(*P));
554   }
555   CHKERRQ(MatSetOption(*P,MAT_ROW_ORIENTED,PETSC_FALSE));
556   CHKERRQ(MatDenseGetArrayRead(Xint,&rxint));
557   CHKERRQ(MatSetValues(*P,Nint,IIint,6,gl,rxint,INSERT_VALUES));
558   CHKERRQ(MatDenseRestoreArrayRead(Xint,&rxint));
559   CHKERRQ(MatDenseGetArrayRead(Xsurf,&rxint));
560   CHKERRQ(MatSetValues(*P,Nsurf,IIsurf,6,gl,rxint,INSERT_VALUES));
561   CHKERRQ(MatDenseRestoreArrayRead(Xsurf,&rxint));
562   CHKERRQ(MatAssemblyBegin(*P,MAT_FINAL_ASSEMBLY));
563   CHKERRQ(MatAssemblyEnd(*P,MAT_FINAL_ASSEMBLY));
564   CHKERRQ(PetscFree2(IIint,IIsurf));
565 
566 #if defined(PETSC_USE_DEBUG_foo)
567   {
568     Vec         x,y;
569     PetscScalar *yy;
570     CHKERRQ(VecCreateMPI(PetscObjectComm((PetscObject)da),Ng,PETSC_DETERMINE,&y));
571     CHKERRQ(VecCreateMPI(PetscObjectComm((PetscObject)da),PETSC_DETERMINE,Ntotal,&x));
572     CHKERRQ(VecSet(x,1.0));
573     CHKERRQ(MatMult(*P,x,y));
574     CHKERRQ(VecGetArray(y,&yy));
575     for (i=0; i<Ng; i++) {
576       PetscCheckFalse(PetscAbsScalar(yy[i]-1.0) > 1.e-10,PETSC_COMM_SELF,PETSC_ERR_PLIB,"Wrong p interpolation at i %D value %g",i,(double)PetscAbsScalar(yy[i]));
577     }
578     CHKERRQ(VecRestoreArray(y,&yy));
579     CHKERRQ(VecDestroy(x));
580     CHKERRQ(VecDestroy(y));
581   }
582 #endif
583 
584   CHKERRQ(MatDestroy(&Aii));
585   CHKERRQ(MatDestroy(&Ais));
586   CHKERRQ(MatDestroy(&Asi));
587   CHKERRQ(MatDestroy(&A));
588   CHKERRQ(ISDestroy(&is));
589   CHKERRQ(ISDestroy(&isint));
590   CHKERRQ(ISDestroy(&issurf));
591   CHKERRQ(MatDestroy(&Xint));
592   CHKERRQ(MatDestroy(&Xsurf));
593   PetscFunctionReturn(0);
594 }
595 
596 /*@
597    PCExoticSetType - Sets the type of coarse grid interpolation to use
598 
599    Logically Collective on PC
600 
601    Input Parameters:
602 +  pc - the preconditioner context
603 -  type - either PC_EXOTIC_FACE or PC_EXOTIC_WIREBASKET (defaults to face)
604 
605    Notes:
606     The face based interpolation has 1 degree of freedom per face and ignores the
607      edge and vertex values completely in the coarse problem. For any seven point
608      stencil the interpolation of a constant on all faces into the interior is that constant.
609 
610      The wirebasket interpolation has 1 degree of freedom per vertex, per edge and
611      per face. A constant on the subdomain boundary is interpolated as that constant
612      in the interior of the domain.
613 
614      The coarse grid matrix is obtained via the Galerkin computation A_c = R A R^T, hence
615      if A is nonsingular A_c is also nonsingular.
616 
617      Both interpolations are suitable for only scalar problems.
618 
619    Level: intermediate
620 
621 .seealso: PCEXOTIC, PCExoticType()
622 @*/
PetscErrorCode  PCExoticSetType(PC pc,PCExoticType type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveEnum(pc,type,2);
  /* no-op when pc is not a PCEXOTIC; otherwise dispatches to PCExoticSetType_Exotic() */
  CHKERRQ(PetscTryMethod(pc,"PCExoticSetType_C",(PC,PCExoticType),(pc,type)));
  PetscFunctionReturn(0);
}
631 
632 static PetscErrorCode  PCExoticSetType_Exotic(PC pc,PCExoticType type)
633 {
634   PC_MG     *mg  = (PC_MG*)pc->data;
635   PC_Exotic *ctx = (PC_Exotic*) mg->innerctx;
636 
637   PetscFunctionBegin;
638   ctx->type = type;
639   PetscFunctionReturn(0);
640 }
641 
642 PetscErrorCode PCSetUp_Exotic(PC pc)
643 {
644   Mat            A;
645   PC_MG          *mg   = (PC_MG*)pc->data;
646   PC_Exotic      *ex   = (PC_Exotic*) mg->innerctx;
647   MatReuse       reuse = (ex->P) ? MAT_REUSE_MATRIX : MAT_INITIAL_MATRIX;
648 
649   PetscFunctionBegin;
650   PetscCheckFalse(!pc->dm,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Need to call PCSetDM() before using this PC");
651   CHKERRQ(PCGetOperators(pc,NULL,&A));
652   if (ex->type == PC_EXOTIC_FACE) {
653     CHKERRQ(DMDAGetFaceInterpolation(pc,pc->dm,ex,A,reuse,&ex->P));
654   } else if (ex->type == PC_EXOTIC_WIREBASKET) {
655     CHKERRQ(DMDAGetWireBasketInterpolation(pc,pc->dm,ex,A,reuse,&ex->P));
656   } else SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_PLIB,"Unknown exotic coarse space %d",ex->type);
657   CHKERRQ(PCMGSetInterpolation(pc,1,ex->P));
658   /* if PC has attached DM we must remove it or the PCMG will use it to compute incorrect sized vectors and interpolations */
659   CHKERRQ(PCSetDM(pc,NULL));
660   CHKERRQ(PCSetUp_MG(pc));
661   PetscFunctionReturn(0);
662 }
663 
PetscErrorCode PCDestroy_Exotic(PC pc)
{
  PC_MG          *mg  = (PC_MG*)pc->data;
  PC_Exotic      *ctx = (PC_Exotic*) mg->innerctx;

  PetscFunctionBegin;
  /* release the pieces owned by the inner context before freeing it */
  CHKERRQ(MatDestroy(&ctx->P));
  CHKERRQ(KSPDestroy(&ctx->ksp));
  CHKERRQ(PetscFree(ctx));
  /* PCDestroy_MG() frees pc->data (which ctx was reached through), so it must come last */
  CHKERRQ(PCDestroy_MG(pc));
  PetscFunctionReturn(0);
}
676 
677 PetscErrorCode PCView_Exotic(PC pc,PetscViewer viewer)
678 {
679   PC_MG     *mg     = (PC_MG*)pc->data;
680   PC_Exotic *exotic = (PC_Exotic*)mg->innerctx;
681   PetscBool  isascii;
682 
683   PetscFunctionBegin;
684   CHKERRQ(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&isascii));
685   if (isascii) {
686     CHKERRQ(PetscViewerASCIIPrintf(viewer,"    Exotic type = %s\n",PCExoticTypes[exotic->type]));
687     if (!exotic->directSolve) {
688       PetscViewer sviewer;
689       PetscMPIInt rank;
690 
691       /* the inner KSP lives on PETSC_COMM_SELF, so view it through a subviewer on rank 0 only */
692       CHKERRQ(PetscViewerASCIIPrintf(viewer,"      Using iterative solver to construct interpolation\n"));
693       CHKERRQ(PetscViewerASCIIPushTab(viewer));
694       CHKERRQ(PetscViewerASCIIPushTab(viewer));  /* should not need to push this twice? */
695       CHKERRQ(PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer));
696       CHKERRMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank));
697       if (rank == 0) CHKERRQ(KSPView(exotic->ksp,sviewer));
698       CHKERRQ(PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer));
699       CHKERRQ(PetscViewerASCIIPopTab(viewer));
700       CHKERRQ(PetscViewerASCIIPopTab(viewer));
701     } else {
702       CHKERRQ(PetscViewerASCIIPrintf(viewer,"      Using direct solver to construct interpolation\n"));
703     }
704   }
705   /* append the standard multigrid view output */
706   CHKERRQ(PCView_MG(pc,viewer));
707   PetscFunctionReturn(0);
708 }
709 
710 PetscErrorCode PCSetFromOptions_Exotic(PetscOptionItems *PetscOptionsObject,PC pc)
711 {
712   PC_MG        *mg     = (PC_MG*)pc->data;
713   PC_Exotic    *exotic = (PC_Exotic*)mg->innerctx;
714   PCExoticType  etype;
715   PetscBool     set;
716 
717   PetscFunctionBegin;
718   CHKERRQ(PetscOptionsHead(PetscOptionsObject,"Exotic coarse space options"));
719   CHKERRQ(PetscOptionsEnum("-pc_exotic_type","face or wirebasket","PCExoticSetType",PCExoticTypes,(PetscEnum)exotic->type,(PetscEnum*)&etype,&set));
720   if (set) CHKERRQ(PCExoticSetType(pc,etype));
721   CHKERRQ(PetscOptionsBool("-pc_exotic_direct_solver","use direct solver to construct interpolation","None",exotic->directSolve,&exotic->directSolve,NULL));
722   if (!exotic->directSolve) {
723     /* lazily create the sequential inner KSP used to build the interpolation iteratively */
724     if (!exotic->ksp) {
725       const char *prefix;
726 
727       CHKERRQ(KSPCreate(PETSC_COMM_SELF,&exotic->ksp));
728       CHKERRQ(KSPSetErrorIfNotConverged(exotic->ksp,pc->erroriffailure));
729       CHKERRQ(PetscObjectIncrementTabLevel((PetscObject)exotic->ksp,(PetscObject)pc,1));
730       CHKERRQ(PetscLogObjectParent((PetscObject)pc,(PetscObject)exotic->ksp));
731       /* inner solver options are namespaced under <pc_prefix>exotic_ */
732       CHKERRQ(PCGetOptionsPrefix(pc,&prefix));
733       CHKERRQ(KSPSetOptionsPrefix(exotic->ksp,prefix));
734       CHKERRQ(KSPAppendOptionsPrefix(exotic->ksp,"exotic_"));
735     }
736     CHKERRQ(KSPSetFromOptions(exotic->ksp));
737   }
738   CHKERRQ(PetscOptionsTail());
739   PetscFunctionReturn(0);
740 }
740 
741 /*MC
742      PCEXOTIC - Two level overlapping Schwarz preconditioner with exotic (non-standard) coarse grid spaces
743 
744      This uses the PCMG infrastructure restricted to two levels and the face and wirebasket based coarse
745    grid spaces.
746 
747    Notes:
748     By default this uses GMRES on the fine grid smoother so this should be used with KSPFGMRES or the smoother changed to not use GMRES
749 
750    References:
751 +  * - These coarse grid spaces originate in the work of Bramble, Pasciak  and Schatz, "The Construction
752    of Preconditioners for Elliptic Problems by Substructuring IV", Mathematics of Computation, volume 53, 1989.
753 .  * - They were generalized slightly in "Domain Decomposition Method for Linear Elasticity", Ph. D. thesis, Barry Smith,
754    New York University, 1990.
755 .  * - They were then explored in great detail in Dryja, Smith, Widlund, "Schwarz Analysis
756    of Iterative Substructuring Methods for Elliptic Problems in Three Dimensions", SIAM Journal on Numerical
757    Analysis, volume 31, 1994. These were developed in the context of iterative substructuring preconditioners.
758 .  * - They were then ingeniously applied as coarse grid spaces for overlapping Schwarz methods by Dohrmann and Widlund.
759    They refer to them as GDSW (generalized Dryja, Smith, Widlund preconditioners). See, for example,
760    Clark R. Dohrmann, Axel Klawonn, and Olof B. Widlund. Extending theory for domain decomposition algorithms to irregular subdomains. In Ulrich Langer, Marco
761    Discacciati, David Keyes, Olof Widlund, and Walter Zulehner, editors, Proceedings
762    of the 17th International Conference on Domain Decomposition Methods in
763    Science and Engineering, held in Strobl, Austria, 2006, number 60 in
764    Springer Verlag, Lecture Notes in Computational Science and Engineering, 2007.
765 .  * -  Clark R. Dohrmann, Axel Klawonn, and Olof B. Widlund. A family of energy minimizing coarse spaces for overlapping Schwarz preconditioners. In Ulrich Langer,
766    Marco Discacciati, David Keyes, Olof Widlund, and Walter Zulehner, editors, Proceedings
767    of the 17th International Conference on Domain Decomposition Methods
768    in Science and Engineering, held in Strobl, Austria, 2006, number 60 in
769    Springer Verlag, Lecture Notes in Computational Science and Engineering, 2007
770 .  * - Clark R. Dohrmann, Axel Klawonn, and Olof B. Widlund. Domain decomposition
771    for less regular subdomains: Overlapping Schwarz in two dimensions. SIAM J.
772    Numer. Anal., 46(4), 2008.
773 -  * - Clark R. Dohrmann and Olof B. Widlund. An overlapping Schwarz
774    algorithm for almost incompressible elasticity. Technical Report
775    TR2008 912, Department of Computer Science, Courant Institute
776    of Mathematical Sciences, New York University, May 2008.
777 
778    Options Database: The usual PCMG options are supported, such as -mg_levels_pc_type <type> -mg_coarse_pc_type <type>
779       -pc_mg_type <type>
780 
781    Level: advanced
782 
783 .seealso:  PCMG, PCSetDM(), PCExoticType, PCExoticSetType()
784 M*/
785 
786 PETSC_EXTERN PetscErrorCode PCCreate_Exotic(PC pc)
787 {
788   PC_Exotic      *ex;
789   PC_MG          *mg;
790 
791   PetscFunctionBegin;
792   /* if type was previously mg; must manually destroy it because call to PCSetType(pc,PCMG) will not destroy it */
793   if (pc->ops->destroy) {
794     CHKERRQ((*pc->ops->destroy)(pc));
795     pc->data = NULL;
796   }
797   CHKERRQ(PetscFree(((PetscObject)pc)->type_name));
798   ((PetscObject)pc)->type_name = NULL;
799 
800   /* build on PCMG restricted to exactly two levels with a Galerkin coarse operator */
801   CHKERRQ(PCSetType(pc,PCMG));
802   CHKERRQ(PCMGSetLevels(pc,2,NULL));
803   CHKERRQ(PCMGSetGalerkin(pc,PC_MG_GALERKIN_PMAT));
804   CHKERRQ(PetscNew(&ex));   /* removed a stray trailing line-continuation backslash that spliced this line with the next */
805   ex->type     = PC_EXOTIC_FACE;
806   mg           = (PC_MG*) pc->data;
807   mg->innerctx = ex;
808 
809   /* override the PCMG entry points with the exotic-specific ones */
810   pc->ops->setfromoptions = PCSetFromOptions_Exotic;
811   pc->ops->view           = PCView_Exotic;
812   pc->ops->destroy        = PCDestroy_Exotic;
813   pc->ops->setup          = PCSetUp_Exotic;
814 
815   CHKERRQ(PetscObjectComposeFunction((PetscObject)pc,"PCExoticSetType_C",PCExoticSetType_Exotic));
816   PetscFunctionReturn(0);
817 }
816