xref: /petsc/src/ksp/pc/impls/wb/wb.c (revision 55e7fe800d976e85ed2b5cd8bfdef564daa37bd9)
1 
2 #include <petscdmda.h>   /*I "petscdmda.h" I*/
3 #include <petsc/private/pcmgimpl.h>   /*I "petscksp.h" I*/
4 #include <petscctable.h>
5 
typedef struct {
  PCExoticType type;         /* which coarse space to build: PC_EXOTIC_FACE or PC_EXOTIC_WIREBASKET */
  Mat          P;            /* the constructed interpolation matrix */
  PetscBool    directSolve;  /* use direct LU factorization to construct interpolation */
  KSP          ksp;          /* inner solver for the subdomain-interior solves when directSolve is false */
} PC_Exotic;
12 
13 const char *const PCExoticTypes[] = {"face","wirebasket","PCExoticType","PC_Exotic",0};
14 
15 
/*
      DMDAGetWireBasketInterpolation - Gets the interpolation for a wirebasket based coarse space

      The coarse space has one degree of freedom per subdomain vertex, edge, and face:
      8 + 12 + 6 = 26 coarse basis functions, hence the 26 columns used throughout below.
      Each column of P is the discrete-harmonic extension of one coarse basis function
      into the subdomain interior (interior values obtained by solving with Aii).
*/
PetscErrorCode DMDAGetWireBasketInterpolation(DM da,PC_Exotic *exotic,Mat Aglobal,MatReuse reuse,Mat *P)
{
  PetscErrorCode         ierr;
  PetscInt               dim,i,j,k,m,n,p,dof,Nint,Nface,Nwire,Nsurf,*Iint,*Isurf,cint = 0,csurf = 0,istart,jstart,kstart,*II,N,c = 0;
  PetscInt               mwidth,nwidth,pwidth,cnt,mp,np,pp,Ntotal,gl[26],*globals,Ng,*IIint,*IIsurf,Nt;
  Mat                    Xint, Xsurf,Xint_tmp;
  IS                     isint,issurf,is,row,col;
  ISLocalToGlobalMapping ltg;
  MPI_Comm               comm;
  Mat                    A,Aii,Ais,Asi,*Aholder,iAii;
  MatFactorInfo          info;
  PetscScalar            *xsurf,*xint;
#if defined(PETSC_USE_DEBUG_foo)   /* deliberately disabled consistency checks; "_foo" suffix keeps them compiled out */
  PetscScalar            tmp;
#endif
  PetscTable             ht;

  PetscFunctionBegin;
  ierr = DMDAGetInfo(da,&dim,0,0,0,&mp,&np,&pp,&dof,0,0,0,0,0);CHKERRQ(ierr);
  if (dof != 1) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Only for single field problems");
  if (dim != 3) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Only coded for 3d problems");
  ierr   = DMDAGetCorners(da,0,0,0,&m,&n,&p);CHKERRQ(ierr);
  ierr   = DMDAGetGhostCorners(da,&istart,&jstart,&kstart,&mwidth,&nwidth,&pwidth);CHKERRQ(ierr);
  /* From here on istart/jstart/kstart are flags, not indices: -1 when a ghost layer
     exists on the low side of this process (so loop bounds like m-istart include it), 0 otherwise */
  istart = istart ? -1 : 0;
  jstart = jstart ? -1 : 0;
  kstart = kstart ? -1 : 0;

  /*
    the columns of P are the interpolation of each coarse grid point (one for each vertex and edge)
    to all the local degrees of freedom (this includes the vertices, edges and faces).

    Xint are the subset of the interpolation into the interior

    Xface are the interpolation onto faces but not into the interior

    Xsurf are the interpolation onto the vertices and edges (the surfbasket)
                                      Xint
    Symbolically one could write P = (Xface) after interchanging the rows to match the natural ordering on the domain
                                      Xsurf
  */
  N     = (m - istart)*(n - jstart)*(p - kstart);                    /* all local points incl. low-side ghosts */
  Nint  = (m-2-istart)*(n-2-jstart)*(p-2-kstart);                    /* strictly interior points */
  Nface = 2*((m-2-istart)*(n-2-jstart) + (m-2-istart)*(p-2-kstart) + (n-2-jstart)*(p-2-kstart));
  Nwire = 4*((m-2-istart) + (n-2-jstart) + (p-2-kstart)) + 8;        /* 12 edges + 8 corners */
  Nsurf = Nface + Nwire;
  /* NOTE(review): MPI_COMM_SELF used directly; PETSC_COMM_SELF is the usual PETSc spelling (same communicator) */
  ierr  = MatCreateSeqDense(MPI_COMM_SELF,Nint,26,NULL,&Xint);CHKERRQ(ierr);
  ierr  = MatCreateSeqDense(MPI_COMM_SELF,Nsurf,26,NULL,&Xsurf);CHKERRQ(ierr);
  ierr  = MatDenseGetArray(Xsurf,&xsurf);CHKERRQ(ierr);

  /*
     Require that all 12 edges and 6 faces have at least one grid point. Otherwise some of the columns of
     Xsurf will be all zero (thus making the coarse matrix singular).
  */
  if (m-istart < 3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of grid points per process in X direction must be at least 3");
  if (n-jstart < 3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of grid points per process in Y direction must be at least 3");
  if (p-kstart < 3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of grid points per process in Z direction must be at least 3");

  /* Fill Xsurf (column-major, leading dimension Nsurf): walk the surface points in the
     same lexicographic i-j-k order used for Isurf below, writing a 1 into the column of
     the vertex/edge/face each surface point belongs to.  Columns 0-8: bottom (k low)
     layer; 9-16: the four side walls; 17-25: top (k high) layer. */
  cnt = 0;

  /* bottom layer, j low row: corner, edge, corner */
  xsurf[cnt++] = 1;
  for (i=1; i<m-istart-1; i++) xsurf[cnt++ + Nsurf] = 1;
  xsurf[cnt++ + 2*Nsurf] = 1;

  /* bottom layer, middle rows: edge, face, edge */
  for (j=1; j<n-1-jstart; j++) {
    xsurf[cnt++ + 3*Nsurf] = 1;
    for (i=1; i<m-istart-1; i++) xsurf[cnt++ + 4*Nsurf] = 1;
    xsurf[cnt++ + 5*Nsurf] = 1;
  }

  /* bottom layer, j high row: corner, edge, corner */
  xsurf[cnt++ + 6*Nsurf] = 1;
  for (i=1; i<m-istart-1; i++) xsurf[cnt++ + 7*Nsurf] = 1;
  xsurf[cnt++ + 8*Nsurf] = 1;

  /* middle layers: vertical edges and the four side faces (interior points are skipped
     by the surface ordering, see the triple loop over II below) */
  for (k=1; k<p-1-kstart; k++) {
    xsurf[cnt++ + 9*Nsurf] = 1;
    for (i=1; i<m-istart-1; i++) xsurf[cnt++ + 10*Nsurf] = 1;
    xsurf[cnt++ + 11*Nsurf] = 1;

    for (j=1; j<n-1-jstart; j++) {
      xsurf[cnt++ + 12*Nsurf] = 1;
      /* these are the interior nodes */
      xsurf[cnt++ + 13*Nsurf] = 1;
    }

    xsurf[cnt++ + 14*Nsurf] = 1;
    for (i=1; i<m-istart-1; i++) xsurf[cnt++ + 15*Nsurf] = 1;
    xsurf[cnt++ + 16*Nsurf] = 1;
  }

  /* top layer, mirror of the bottom layer */
  xsurf[cnt++ + 17*Nsurf] = 1;
  for (i=1; i<m-istart-1; i++) xsurf[cnt++ + 18*Nsurf] = 1;
  xsurf[cnt++ + 19*Nsurf] = 1;

  for (j=1;j<n-1-jstart;j++) {
    xsurf[cnt++ + 20*Nsurf] = 1;
    for (i=1; i<m-istart-1; i++) xsurf[cnt++ + 21*Nsurf] = 1;
    xsurf[cnt++ + 22*Nsurf] = 1;
  }

  xsurf[cnt++ + 23*Nsurf] = 1;
  for (i=1; i<m-istart-1; i++) xsurf[cnt++ + 24*Nsurf] = 1;
  xsurf[cnt++ + 25*Nsurf] = 1;


  /* interpolations only sum to 1 when using direct solver */
#if defined(PETSC_USE_DEBUG_foo)
  for (i=0; i<Nsurf; i++) {
    tmp = 0.0;
    for (j=0; j<26; j++) tmp += xsurf[i+j*Nsurf];
    if (PetscAbsScalar(tmp-1.0) > 1.e-10) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Wrong Xsurf interpolation at i %D value %g",i,(double)PetscAbsScalar(tmp));
  }
#endif
  ierr = MatDenseRestoreArray(Xsurf,&xsurf);CHKERRQ(ierr);
  /* ierr = MatView(Xsurf,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);*/


  /*
       I are the indices for all the needed vertices (in global numbering)
       Iint are the indices for the interior values, I surf for the surface values
            (This is just for the part of the global matrix obtained with MatCreateSubMatrix(), it
             is NOT the local DMDA ordering.)
       IIint and IIsurf are the same as the Iint, Isurf except they are in the global numbering
  */
#define Endpoint(a,start,b) (a == 0 || a == (b-1-start))
  ierr = PetscMalloc3(N,&II,Nint,&Iint,Nsurf,&Isurf);CHKERRQ(ierr);
  ierr = PetscMalloc2(Nint,&IIint,Nsurf,&IIsurf);CHKERRQ(ierr);
  /* Partition the local patch into interior vs surface, recording each point both in
     the local ghosted (mwidth-strided) numbering and the compact patch numbering */
  for (k=0; k<p-kstart; k++) {
    for (j=0; j<n-jstart; j++) {
      for (i=0; i<m-istart; i++) {
        II[c++] = i + j*mwidth + k*mwidth*nwidth;

        if (!Endpoint(i,istart,m) && !Endpoint(j,jstart,n) && !Endpoint(k,kstart,p)) {
          IIint[cint]  = i + j*mwidth + k*mwidth*nwidth;
          Iint[cint++] = i + j*(m-istart) + k*(m-istart)*(n-jstart);
        } else {
          IIsurf[csurf]  = i + j*mwidth + k*mwidth*nwidth;
          Isurf[csurf++] = i + j*(m-istart) + k*(m-istart)*(n-jstart);
        }
      }
    }
  }
  if (c != N) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"c != N");
  if (cint != Nint) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"cint != Nint");
  if (csurf != Nsurf) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"csurf != Nsurf");
  ierr = DMGetLocalToGlobalMapping(da,&ltg);CHKERRQ(ierr);
  /* in-place conversion of the local ghosted indices to global indices */
  ierr = ISLocalToGlobalMappingApply(ltg,N,II,II);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingApply(ltg,Nint,IIint,IIint);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingApply(ltg,Nsurf,IIsurf,IIsurf);CHKERRQ(ierr);
  ierr = PetscObjectGetComm((PetscObject)da,&comm);CHKERRQ(ierr);
  ierr = ISCreateGeneral(comm,N,II,PETSC_COPY_VALUES,&is);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PETSC_COMM_SELF,Nint,Iint,PETSC_COPY_VALUES,&isint);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PETSC_COMM_SELF,Nsurf,Isurf,PETSC_COPY_VALUES,&issurf);CHKERRQ(ierr);
  ierr = PetscFree3(II,Iint,Isurf);CHKERRQ(ierr);

  /* NOTE(review): the face variant calls ISSort(is) before MatCreateSubMatrices(); verify
     whether that is also required here (is built from a lexicographic walk, so it may
     already be sorted for this ordering) */
  ierr = MatCreateSubMatrices(Aglobal,1,&is,&is,MAT_INITIAL_MATRIX,&Aholder);CHKERRQ(ierr);
  A    = *Aholder;
  ierr = PetscFree(Aholder);CHKERRQ(ierr);

  /* extract the interior-interior, interior-surface and surface-interior couplings */
  ierr = MatCreateSubMatrix(A,isint,isint,MAT_INITIAL_MATRIX,&Aii);CHKERRQ(ierr);
  ierr = MatCreateSubMatrix(A,isint,issurf,MAT_INITIAL_MATRIX,&Ais);CHKERRQ(ierr);
  ierr = MatCreateSubMatrix(A,issurf,isint,MAT_INITIAL_MATRIX,&Asi);CHKERRQ(ierr);

  /*
     Solve for the interpolation onto the interior Xint:  Aii * Xint = -Ais * Xsurf
  */
  ierr = MatMatMult(Ais,Xsurf,MAT_INITIAL_MATRIX,PETSC_DETERMINE,&Xint_tmp);CHKERRQ(ierr);
  ierr = MatScale(Xint_tmp,-1.0);CHKERRQ(ierr);
  if (exotic->directSolve) {
    /* one LU factorization of Aii, then a multi-RHS solve for all 26 columns at once */
    ierr = MatGetFactor(Aii,MATSOLVERPETSC,MAT_FACTOR_LU,&iAii);CHKERRQ(ierr);
    ierr = MatFactorInfoInitialize(&info);CHKERRQ(ierr);
    ierr = MatGetOrdering(Aii,MATORDERINGND,&row,&col);CHKERRQ(ierr);
    ierr = MatLUFactorSymbolic(iAii,Aii,row,col,&info);CHKERRQ(ierr);
    ierr = ISDestroy(&row);CHKERRQ(ierr);
    ierr = ISDestroy(&col);CHKERRQ(ierr);
    ierr = MatLUFactorNumeric(iAii,Aii,&info);CHKERRQ(ierr);
    ierr = MatMatSolve(iAii,Xint_tmp,Xint);CHKERRQ(ierr);
    ierr = MatDestroy(&iAii);CHKERRQ(ierr);
  } else {
    Vec         b,x;
    PetscScalar *xint_tmp;

    /* iterative path: create array-less wrapper vectors and point them at successive
       columns of the dense matrices, solving one column per KSPSolve() */
    ierr = MatDenseGetArray(Xint,&xint);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,Nint,0,&x);CHKERRQ(ierr);
    ierr = MatDenseGetArray(Xint_tmp,&xint_tmp);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,Nint,0,&b);CHKERRQ(ierr);
    ierr = KSPSetOperators(exotic->ksp,Aii,Aii);CHKERRQ(ierr);
    for (i=0; i<26; i++) {
      ierr = VecPlaceArray(x,xint+i*Nint);CHKERRQ(ierr);
      ierr = VecPlaceArray(b,xint_tmp+i*Nint);CHKERRQ(ierr);
      ierr = KSPSolve(exotic->ksp,b,x);CHKERRQ(ierr);
      ierr = VecResetArray(x);CHKERRQ(ierr);
      ierr = VecResetArray(b);CHKERRQ(ierr);
    }
    ierr = MatDenseRestoreArray(Xint,&xint);CHKERRQ(ierr);
    ierr = MatDenseRestoreArray(Xint_tmp,&xint_tmp);CHKERRQ(ierr);
    ierr = VecDestroy(&x);CHKERRQ(ierr);
    ierr = VecDestroy(&b);CHKERRQ(ierr);
  }
  ierr = MatDestroy(&Xint_tmp);CHKERRQ(ierr);

#if defined(PETSC_USE_DEBUG_foo)
  ierr = MatDenseGetArray(Xint,&xint);CHKERRQ(ierr);
  for (i=0; i<Nint; i++) {
    tmp = 0.0;
    for (j=0; j<26; j++) tmp += xint[i+j*Nint];

    if (PetscAbsScalar(tmp-1.0) > 1.e-10) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Wrong Xint interpolation at i %D value %g",i,(double)PetscAbsScalar(tmp));
  }
  ierr = MatDenseRestoreArray(Xint,&xint);CHKERRQ(ierr);
  /* ierr =MatView(Xint,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr); */
#endif


  /*         total vertices             total faces                                  total edges */
  Ntotal = (mp + 1)*(np + 1)*(pp + 1) + mp*np*(pp+1) + mp*pp*(np+1) + np*pp*(mp+1) + mp*(np+1)*(pp+1) + np*(mp+1)*(pp+1) +  pp*(mp+1)*(np+1);

  /*
      For each vertex, edge, face on process (in the same orderings as used above) determine its local number including ghost points
  */
  cnt = 0;

  gl[cnt++] = 0;  { gl[cnt++] = 1;} gl[cnt++] = m-istart-1;
  { gl[cnt++] = mwidth;  { gl[cnt++] = mwidth+1;} gl[cnt++] = mwidth + m-istart-1;}
  gl[cnt++] = mwidth*(n-jstart-1);  { gl[cnt++] = mwidth*(n-jstart-1)+1;} gl[cnt++] = mwidth*(n-jstart-1) + m-istart-1;
  {
    gl[cnt++] = mwidth*nwidth;  { gl[cnt++] = mwidth*nwidth+1;}  gl[cnt++] = mwidth*nwidth+ m-istart-1;
    { gl[cnt++] = mwidth*nwidth + mwidth; /* these are the interior nodes */ gl[cnt++] = mwidth*nwidth + mwidth+m-istart-1;}
    gl[cnt++] = mwidth*nwidth+ mwidth*(n-jstart-1);   { gl[cnt++] = mwidth*nwidth+mwidth*(n-jstart-1)+1;} gl[cnt++] = mwidth*nwidth+mwidth*(n-jstart-1) + m-istart-1;
  }
  gl[cnt++] = mwidth*nwidth*(p-kstart-1); { gl[cnt++] = mwidth*nwidth*(p-kstart-1)+1;} gl[cnt++] = mwidth*nwidth*(p-kstart-1) +  m-istart-1;
  { gl[cnt++] = mwidth*nwidth*(p-kstart-1) + mwidth;   { gl[cnt++] = mwidth*nwidth*(p-kstart-1) + mwidth+1;} gl[cnt++] = mwidth*nwidth*(p-kstart-1)+mwidth+m-istart-1;}
  gl[cnt++] = mwidth*nwidth*(p-kstart-1) +  mwidth*(n-jstart-1);  { gl[cnt++] = mwidth*nwidth*(p-kstart-1)+ mwidth*(n-jstart-1)+1;} gl[cnt++] = mwidth*nwidth*(p-kstart-1) + mwidth*(n-jstart-1) + m-istart-1;

  /* PetscIntView(26,gl,PETSC_VIEWER_STDOUT_WORLD); */
  /* convert that to global numbering and get them on all processes */
  ierr = ISLocalToGlobalMappingApply(ltg,26,gl,gl);CHKERRQ(ierr);
  /* PetscIntView(26,gl,PETSC_VIEWER_STDOUT_WORLD); */
  ierr = PetscMalloc1(26*mp*np*pp,&globals);CHKERRQ(ierr);
  ierr = MPI_Allgather(gl,26,MPIU_INT,globals,26,MPIU_INT,PetscObjectComm((PetscObject)da));CHKERRQ(ierr);

  /* Number the coarse grid points from 0 to Ntotal */
  ierr = MatGetSize(Aglobal,&Nt,NULL);CHKERRQ(ierr);
  ierr = PetscTableCreate(Ntotal/3,Nt+1,&ht);CHKERRQ(ierr);
  /* keys are shifted by +1 since PetscTable cannot store a key of 0; coarse points shared
     between processes hash to the same slot, giving a consistent global coarse numbering */
  for (i=0; i<26*mp*np*pp; i++) {
    ierr = PetscTableAddCount(ht,globals[i]+1);CHKERRQ(ierr);
  }
  ierr = PetscTableGetCount(ht,&cnt);CHKERRQ(ierr);
  if (cnt != Ntotal) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Hash table size %D not equal to total number coarse grid points %D",cnt,Ntotal);
  ierr = PetscFree(globals);CHKERRQ(ierr);
  for (i=0; i<26; i++) {
    ierr = PetscTableFind(ht,gl[i]+1,&gl[i]);CHKERRQ(ierr);
    gl[i]--;   /* undo the +1 key shift */
  }
  ierr = PetscTableDestroy(&ht);CHKERRQ(ierr);
  /* PetscIntView(26,gl,PETSC_VIEWER_STDOUT_WORLD); */

  /* construct global interpolation matrix */
  ierr = MatGetLocalSize(Aglobal,&Ng,NULL);CHKERRQ(ierr);
  if (reuse == MAT_INITIAL_MATRIX) {
    ierr = MatCreateAIJ(PetscObjectComm((PetscObject)da),Ng,PETSC_DECIDE,PETSC_DECIDE,Ntotal,Nint+Nsurf,NULL,Nint+Nsurf,NULL,P);CHKERRQ(ierr);
  } else {
    ierr = MatZeroEntries(*P);CHKERRQ(ierr);
  }
  /* the dense arrays are column-major, so insert values column-oriented */
  ierr = MatSetOption(*P,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr);
  ierr = MatDenseGetArray(Xint,&xint);CHKERRQ(ierr);
  ierr = MatSetValues(*P,Nint,IIint,26,gl,xint,INSERT_VALUES);CHKERRQ(ierr);
  ierr = MatDenseRestoreArray(Xint,&xint);CHKERRQ(ierr);
  ierr = MatDenseGetArray(Xsurf,&xsurf);CHKERRQ(ierr);
  ierr = MatSetValues(*P,Nsurf,IIsurf,26,gl,xsurf,INSERT_VALUES);CHKERRQ(ierr);
  ierr = MatDenseRestoreArray(Xsurf,&xsurf);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(*P,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*P,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = PetscFree2(IIint,IIsurf);CHKERRQ(ierr);

#if defined(PETSC_USE_DEBUG_foo)
  {
    Vec         x,y;
    PetscScalar *yy;
    ierr = VecCreateMPI(PetscObjectComm((PetscObject)da),Ng,PETSC_DETERMINE,&y);CHKERRQ(ierr);
    ierr = VecCreateMPI(PetscObjectComm((PetscObject)da),PETSC_DETERMINE,Ntotal,&x);CHKERRQ(ierr);
    ierr = VecSet(x,1.0);CHKERRQ(ierr);
    ierr = MatMult(*P,x,y);CHKERRQ(ierr);
    ierr = VecGetArray(y,&yy);CHKERRQ(ierr);
    for (i=0; i<Ng; i++) {
      if (PetscAbsScalar(yy[i]-1.0) > 1.e-10) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Wrong p interpolation at i %D value %g",i,(double)PetscAbsScalar(yy[i]));
    }
    ierr = VecRestoreArray(y,&yy);CHKERRQ(ierr);
    /* NOTE(review): VecDestroy() takes Vec*; these should be VecDestroy(&x)/VecDestroy(&y)
       if this (currently compiled-out) debug block is ever enabled */
    ierr = VecDestroy(x);CHKERRQ(ierr);
    ierr = VecDestroy(y);CHKERRQ(ierr);
  }
#endif

  ierr = MatDestroy(&Aii);CHKERRQ(ierr);
  ierr = MatDestroy(&Ais);CHKERRQ(ierr);
  ierr = MatDestroy(&Asi);CHKERRQ(ierr);
  ierr = MatDestroy(&A);CHKERRQ(ierr);
  ierr = ISDestroy(&is);CHKERRQ(ierr);
  ierr = ISDestroy(&isint);CHKERRQ(ierr);
  ierr = ISDestroy(&issurf);CHKERRQ(ierr);
  ierr = MatDestroy(&Xint);CHKERRQ(ierr);
  ierr = MatDestroy(&Xsurf);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
323 
/*
      DMDAGetFaceInterpolation - Gets the interpolation for a face based coarse space

      The coarse space has one degree of freedom per subdomain face only (6 columns);
      edge and vertex coarse values are not represented.  Structure parallels
      DMDAGetWireBasketInterpolation() with 6 columns instead of 26.
*/
PetscErrorCode DMDAGetFaceInterpolation(DM da,PC_Exotic *exotic,Mat Aglobal,MatReuse reuse,Mat *P)
{
  PetscErrorCode         ierr;
  PetscInt               dim,i,j,k,m,n,p,dof,Nint,Nface,Nwire,Nsurf,*Iint,*Isurf,cint = 0,csurf = 0,istart,jstart,kstart,*II,N,c = 0;
  PetscInt               mwidth,nwidth,pwidth,cnt,mp,np,pp,Ntotal,gl[6],*globals,Ng,*IIint,*IIsurf,Nt;
  Mat                    Xint, Xsurf,Xint_tmp;
  IS                     isint,issurf,is,row,col;
  ISLocalToGlobalMapping ltg;
  MPI_Comm               comm;
  Mat                    A,Aii,Ais,Asi,*Aholder,iAii;
  MatFactorInfo          info;
  PetscScalar            *xsurf,*xint;
#if defined(PETSC_USE_DEBUG_foo)   /* deliberately disabled consistency checks; "_foo" suffix keeps them compiled out */
  PetscScalar            tmp;
#endif
  PetscTable             ht;

  PetscFunctionBegin;
  ierr = DMDAGetInfo(da,&dim,0,0,0,&mp,&np,&pp,&dof,0,0,0,0,0);CHKERRQ(ierr);
  if (dof != 1) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Only for single field problems");
  if (dim != 3) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Only coded for 3d problems");
  ierr   = DMDAGetCorners(da,0,0,0,&m,&n,&p);CHKERRQ(ierr);
  ierr   = DMDAGetGhostCorners(da,&istart,&jstart,&kstart,&mwidth,&nwidth,&pwidth);CHKERRQ(ierr);
  /* From here on istart/jstart/kstart are flags, not indices: -1 when a ghost layer
     exists on the low side of this process (so loop bounds like m-istart include it), 0 otherwise */
  istart = istart ? -1 : 0;
  jstart = jstart ? -1 : 0;
  kstart = kstart ? -1 : 0;

  /*
    the columns of P are the interpolation of each coarse grid point (one for each vertex and edge)
    to all the local degrees of freedom (this includes the vertices, edges and faces).

    Xint are the subset of the interpolation into the interior

    Xface are the interpolation onto faces but not into the interior

    Xsurf are the interpolation onto the vertices and edges (the surfbasket)
                                      Xint
    Symbolically one could write P = (Xface) after interchanging the rows to match the natural ordering on the domain
                                      Xsurf
  */
  N     = (m - istart)*(n - jstart)*(p - kstart);                    /* all local points incl. low-side ghosts */
  Nint  = (m-2-istart)*(n-2-jstart)*(p-2-kstart);                    /* strictly interior points */
  Nface = 2*((m-2-istart)*(n-2-jstart) + (m-2-istart)*(p-2-kstart) + (n-2-jstart)*(p-2-kstart));
  Nwire = 4*((m-2-istart) + (n-2-jstart) + (p-2-kstart)) + 8;        /* 12 edges + 8 corners */
  Nsurf = Nface + Nwire;
  /* NOTE(review): MPI_COMM_SELF used directly; PETSC_COMM_SELF is the usual PETSc spelling (same communicator) */
  ierr  = MatCreateSeqDense(MPI_COMM_SELF,Nint,6,NULL,&Xint);CHKERRQ(ierr);
  ierr  = MatCreateSeqDense(MPI_COMM_SELF,Nsurf,6,NULL,&Xsurf);CHKERRQ(ierr);
  ierr  = MatDenseGetArray(Xsurf,&xsurf);CHKERRQ(ierr);

  /*
     Require that all 12 edges and 6 faces have at least one grid point. Otherwise some of the columns of
     Xsurf will be all zero (thus making the coarse matrix singular).
  */
  if (m-istart < 3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of grid points per process in X direction must be at least 3");
  if (n-jstart < 3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of grid points per process in Y direction must be at least 3");
  if (p-kstart < 3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of grid points per process in Z direction must be at least 3");

  /* Fill Xsurf (column-major, leading dimension Nsurf): one column per face (bottom,
     the four side walls, top), written in the same lexicographic order used for Isurf
     below.  Edge/corner rows of Xsurf stay zero: only face points carry coarse values. */
  cnt = 0;
  /* column 0: bottom (k low) face */
  for (j=1; j<n-1-jstart; j++) {
    for (i=1; i<m-istart-1; i++) xsurf[cnt++ + 0*Nsurf] = 1;
  }

  /* columns 1-4: the four side faces */
  for (k=1; k<p-1-kstart; k++) {
    for (i=1; i<m-istart-1; i++) xsurf[cnt++ + 1*Nsurf] = 1;
    for (j=1; j<n-1-jstart; j++) {
      xsurf[cnt++ + 2*Nsurf] = 1;
      /* these are the interior nodes */
      xsurf[cnt++ + 3*Nsurf] = 1;
    }
    for (i=1; i<m-istart-1; i++) xsurf[cnt++ + 4*Nsurf] = 1;
  }
  /* column 5: top (k high) face */
  for (j=1;j<n-1-jstart;j++) {
    for (i=1; i<m-istart-1; i++) xsurf[cnt++ + 5*Nsurf] = 1;
  }

#if defined(PETSC_USE_DEBUG_foo)
  for (i=0; i<Nsurf; i++) {
    tmp = 0.0;
    for (j=0; j<6; j++) tmp += xsurf[i+j*Nsurf];

    if (PetscAbsScalar(tmp-1.0) > 1.e-10) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Wrong Xsurf interpolation at i %D value %g",i,(double)PetscAbsScalar(tmp));
  }
#endif
  ierr = MatDenseRestoreArray(Xsurf,&xsurf);CHKERRQ(ierr);
  /* ierr = MatView(Xsurf,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);*/


  /*
       I are the indices for all the needed vertices (in global numbering)
       Iint are the indices for the interior values, I surf for the surface values
            (This is just for the part of the global matrix obtained with MatCreateSubMatrix(), it
             is NOT the local DMDA ordering.)
       IIint and IIsurf are the same as the Iint, Isurf except they are in the global numbering
  */
#define Endpoint(a,start,b) (a == 0 || a == (b-1-start))
  ierr = PetscMalloc3(N,&II,Nint,&Iint,Nsurf,&Isurf);CHKERRQ(ierr);
  ierr = PetscMalloc2(Nint,&IIint,Nsurf,&IIsurf);CHKERRQ(ierr);
  /* Partition the local patch into interior vs surface, recording each point both in
     the local ghosted (mwidth-strided) numbering and the compact patch numbering */
  for (k=0; k<p-kstart; k++) {
    for (j=0; j<n-jstart; j++) {
      for (i=0; i<m-istart; i++) {
        II[c++] = i + j*mwidth + k*mwidth*nwidth;

        if (!Endpoint(i,istart,m) && !Endpoint(j,jstart,n) && !Endpoint(k,kstart,p)) {
          IIint[cint]  = i + j*mwidth + k*mwidth*nwidth;
          Iint[cint++] = i + j*(m-istart) + k*(m-istart)*(n-jstart);
        } else {
          IIsurf[csurf]  = i + j*mwidth + k*mwidth*nwidth;
          Isurf[csurf++] = i + j*(m-istart) + k*(m-istart)*(n-jstart);
        }
      }
    }
  }
  if (c != N) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"c != N");
  if (cint != Nint) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"cint != Nint");
  if (csurf != Nsurf) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"csurf != Nsurf");
  ierr = DMGetLocalToGlobalMapping(da,&ltg);CHKERRQ(ierr);
  /* in-place conversion of the local ghosted indices to global indices */
  ierr = ISLocalToGlobalMappingApply(ltg,N,II,II);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingApply(ltg,Nint,IIint,IIint);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingApply(ltg,Nsurf,IIsurf,IIsurf);CHKERRQ(ierr);
  ierr = PetscObjectGetComm((PetscObject)da,&comm);CHKERRQ(ierr);
  ierr = ISCreateGeneral(comm,N,II,PETSC_COPY_VALUES,&is);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PETSC_COMM_SELF,Nint,Iint,PETSC_COPY_VALUES,&isint);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PETSC_COMM_SELF,Nsurf,Isurf,PETSC_COPY_VALUES,&issurf);CHKERRQ(ierr);
  ierr = PetscFree3(II,Iint,Isurf);CHKERRQ(ierr);

  ierr = ISSort(is);CHKERRQ(ierr);
  ierr = MatCreateSubMatrices(Aglobal,1,&is,&is,MAT_INITIAL_MATRIX,&Aholder);CHKERRQ(ierr);
  A    = *Aholder;
  ierr = PetscFree(Aholder);CHKERRQ(ierr);

  /* extract the interior-interior, interior-surface and surface-interior couplings */
  ierr = MatCreateSubMatrix(A,isint,isint,MAT_INITIAL_MATRIX,&Aii);CHKERRQ(ierr);
  ierr = MatCreateSubMatrix(A,isint,issurf,MAT_INITIAL_MATRIX,&Ais);CHKERRQ(ierr);
  ierr = MatCreateSubMatrix(A,issurf,isint,MAT_INITIAL_MATRIX,&Asi);CHKERRQ(ierr);

  /*
     Solve for the interpolation onto the interior Xint:  Aii * Xint = -Ais * Xsurf
  */
  ierr = MatMatMult(Ais,Xsurf,MAT_INITIAL_MATRIX,PETSC_DETERMINE,&Xint_tmp);CHKERRQ(ierr);
  ierr = MatScale(Xint_tmp,-1.0);CHKERRQ(ierr);

  if (exotic->directSolve) {
    /* one LU factorization of Aii, then a multi-RHS solve for all 6 columns at once */
    ierr = MatGetFactor(Aii,MATSOLVERPETSC,MAT_FACTOR_LU,&iAii);CHKERRQ(ierr);
    ierr = MatFactorInfoInitialize(&info);CHKERRQ(ierr);
    ierr = MatGetOrdering(Aii,MATORDERINGND,&row,&col);CHKERRQ(ierr);
    ierr = MatLUFactorSymbolic(iAii,Aii,row,col,&info);CHKERRQ(ierr);
    ierr = ISDestroy(&row);CHKERRQ(ierr);
    ierr = ISDestroy(&col);CHKERRQ(ierr);
    ierr = MatLUFactorNumeric(iAii,Aii,&info);CHKERRQ(ierr);
    ierr = MatMatSolve(iAii,Xint_tmp,Xint);CHKERRQ(ierr);
    ierr = MatDestroy(&iAii);CHKERRQ(ierr);
  } else {
    Vec         b,x;
    PetscScalar *xint_tmp;

    /* iterative path: create array-less wrapper vectors and point them at successive
       columns of the dense matrices, solving one column per KSPSolve() */
    ierr = MatDenseGetArray(Xint,&xint);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,Nint,0,&x);CHKERRQ(ierr);
    ierr = MatDenseGetArray(Xint_tmp,&xint_tmp);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,Nint,0,&b);CHKERRQ(ierr);
    ierr = KSPSetOperators(exotic->ksp,Aii,Aii);CHKERRQ(ierr);
    for (i=0; i<6; i++) {
      ierr = VecPlaceArray(x,xint+i*Nint);CHKERRQ(ierr);
      ierr = VecPlaceArray(b,xint_tmp+i*Nint);CHKERRQ(ierr);
      ierr = KSPSolve(exotic->ksp,b,x);CHKERRQ(ierr);
      ierr = VecResetArray(x);CHKERRQ(ierr);
      ierr = VecResetArray(b);CHKERRQ(ierr);
    }
    ierr = MatDenseRestoreArray(Xint,&xint);CHKERRQ(ierr);
    ierr = MatDenseRestoreArray(Xint_tmp,&xint_tmp);CHKERRQ(ierr);
    ierr = VecDestroy(&x);CHKERRQ(ierr);
    ierr = VecDestroy(&b);CHKERRQ(ierr);
  }
  ierr = MatDestroy(&Xint_tmp);CHKERRQ(ierr);

#if defined(PETSC_USE_DEBUG_foo)
  ierr = MatDenseGetArray(Xint,&xint);CHKERRQ(ierr);
  for (i=0; i<Nint; i++) {
    tmp = 0.0;
    for (j=0; j<6; j++) tmp += xint[i+j*Nint];

    if (PetscAbsScalar(tmp-1.0) > 1.e-10) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Wrong Xint interpolation at i %D value %g",i,(double)PetscAbsScalar(tmp));
  }
  ierr = MatDenseRestoreArray(Xint,&xint);CHKERRQ(ierr);
  /* ierr =MatView(Xint,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr); */
#endif


  /*         total faces    */
  Ntotal =  mp*np*(pp+1) + mp*pp*(np+1) + np*pp*(mp+1);

  /*
      For each vertex, edge, face on process (in the same orderings as used above) determine its local number including ghost points
  */
  cnt = 0;
  /* one representative local point (the first interior point of the face) per face,
     in the column order used for Xsurf above */
  { gl[cnt++] = mwidth+1;}
  {
    { gl[cnt++] = mwidth*nwidth+1;}
    { gl[cnt++] = mwidth*nwidth + mwidth; /* these are the interior nodes */ gl[cnt++] = mwidth*nwidth + mwidth+m-istart-1;}
    { gl[cnt++] = mwidth*nwidth+mwidth*(n-jstart-1)+1;}
  }
  { gl[cnt++] = mwidth*nwidth*(p-kstart-1) + mwidth+1;}

  /* PetscIntView(6,gl,PETSC_VIEWER_STDOUT_WORLD); */
  /* convert that to global numbering and get them on all processes */
  ierr = ISLocalToGlobalMappingApply(ltg,6,gl,gl);CHKERRQ(ierr);
  /* PetscIntView(6,gl,PETSC_VIEWER_STDOUT_WORLD); */
  ierr = PetscMalloc1(6*mp*np*pp,&globals);CHKERRQ(ierr);
  ierr = MPI_Allgather(gl,6,MPIU_INT,globals,6,MPIU_INT,PetscObjectComm((PetscObject)da));CHKERRQ(ierr);

  /* Number the coarse grid points from 0 to Ntotal */
  ierr = MatGetSize(Aglobal,&Nt,NULL);CHKERRQ(ierr);
  ierr = PetscTableCreate(Ntotal/3,Nt+1,&ht);CHKERRQ(ierr);
  /* keys are shifted by +1 since PetscTable cannot store a key of 0; faces shared
     between processes hash to the same slot, giving a consistent global coarse numbering */
  for (i=0; i<6*mp*np*pp; i++) {
    ierr = PetscTableAddCount(ht,globals[i]+1);CHKERRQ(ierr);
  }
  ierr = PetscTableGetCount(ht,&cnt);CHKERRQ(ierr);
  if (cnt != Ntotal) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Hash table size %D not equal to total number coarse grid points %D",cnt,Ntotal);
  ierr = PetscFree(globals);CHKERRQ(ierr);
  for (i=0; i<6; i++) {
    ierr = PetscTableFind(ht,gl[i]+1,&gl[i]);CHKERRQ(ierr);
    gl[i]--;   /* undo the +1 key shift */
  }
  ierr = PetscTableDestroy(&ht);CHKERRQ(ierr);
  /* PetscIntView(6,gl,PETSC_VIEWER_STDOUT_WORLD); */

  /* construct global interpolation matrix */
  ierr = MatGetLocalSize(Aglobal,&Ng,NULL);CHKERRQ(ierr);
  if (reuse == MAT_INITIAL_MATRIX) {
    ierr = MatCreateAIJ(PetscObjectComm((PetscObject)da),Ng,PETSC_DECIDE,PETSC_DECIDE,Ntotal,Nint+Nsurf,NULL,Nint,NULL,P);CHKERRQ(ierr);
  } else {
    ierr = MatZeroEntries(*P);CHKERRQ(ierr);
  }
  /* the dense arrays are column-major, so insert values column-oriented */
  ierr = MatSetOption(*P,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr);
  ierr = MatDenseGetArray(Xint,&xint);CHKERRQ(ierr);
  ierr = MatSetValues(*P,Nint,IIint,6,gl,xint,INSERT_VALUES);CHKERRQ(ierr);
  ierr = MatDenseRestoreArray(Xint,&xint);CHKERRQ(ierr);
  ierr = MatDenseGetArray(Xsurf,&xsurf);CHKERRQ(ierr);
  ierr = MatSetValues(*P,Nsurf,IIsurf,6,gl,xsurf,INSERT_VALUES);CHKERRQ(ierr);
  ierr = MatDenseRestoreArray(Xsurf,&xsurf);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(*P,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*P,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = PetscFree2(IIint,IIsurf);CHKERRQ(ierr);


#if defined(PETSC_USE_DEBUG_foo)
  {
    Vec         x,y;
    PetscScalar *yy;
    ierr = VecCreateMPI(PetscObjectComm((PetscObject)da),Ng,PETSC_DETERMINE,&y);CHKERRQ(ierr);
    ierr = VecCreateMPI(PetscObjectComm((PetscObject)da),PETSC_DETERMINE,Ntotal,&x);CHKERRQ(ierr);
    ierr = VecSet(x,1.0);CHKERRQ(ierr);
    ierr = MatMult(*P,x,y);CHKERRQ(ierr);
    ierr = VecGetArray(y,&yy);CHKERRQ(ierr);
    for (i=0; i<Ng; i++) {
      if (PetscAbsScalar(yy[i]-1.0) > 1.e-10) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Wrong p interpolation at i %D value %g",i,(double)PetscAbsScalar(yy[i]));
    }
    ierr = VecRestoreArray(y,&yy);CHKERRQ(ierr);
    /* NOTE(review): VecDestroy() takes Vec*; these should be VecDestroy(&x)/VecDestroy(&y)
       if this (currently compiled-out) debug block is ever enabled */
    ierr = VecDestroy(x);CHKERRQ(ierr);
    ierr = VecDestroy(y);CHKERRQ(ierr);
  }
#endif

  ierr = MatDestroy(&Aii);CHKERRQ(ierr);
  ierr = MatDestroy(&Ais);CHKERRQ(ierr);
  ierr = MatDestroy(&Asi);CHKERRQ(ierr);
  ierr = MatDestroy(&A);CHKERRQ(ierr);
  ierr = ISDestroy(&is);CHKERRQ(ierr);
  ierr = ISDestroy(&isint);CHKERRQ(ierr);
  ierr = ISDestroy(&issurf);CHKERRQ(ierr);
  ierr = MatDestroy(&Xint);CHKERRQ(ierr);
  ierr = MatDestroy(&Xsurf);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
600 
601 
602 /*@
603    PCExoticSetType - Sets the type of coarse grid interpolation to use
604 
605    Logically Collective on PC
606 
607    Input Parameters:
608 +  pc - the preconditioner context
609 -  type - either PC_EXOTIC_FACE or PC_EXOTIC_WIREBASKET (defaults to face)
610 
611    Notes:
612     The face based interpolation has 1 degree of freedom per face and ignores the
613      edge and vertex values completely in the coarse problem. For any seven point
614      stencil the interpolation of a constant on all faces into the interior is that constant.
615 
616      The wirebasket interpolation has 1 degree of freedom per vertex, per edge and
617      per face. A constant on the subdomain boundary is interpolated as that constant
618      in the interior of the domain.
619 
620      The coarse grid matrix is obtained via the Galerkin computation A_c = R A R^T, hence
621      if A is nonsingular A_c is also nonsingular.
622 
     Both interpolations are suitable only for scalar problems.
624 
625    Level: intermediate
626 
627 
.seealso: PCEXOTIC, PCExoticType
629 @*/
/* Public entry point: validate arguments, then dispatch to the implementation's
   PCExoticSetType_C method (a no-op if pc is not of type PCEXOTIC). */
PetscErrorCode  PCExoticSetType(PC pc,PCExoticType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveEnum(pc,type,2);
  ierr = PetscTryMethod(pc,"PCExoticSetType_C",(PC,PCExoticType),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
640 
641 static PetscErrorCode  PCExoticSetType_Exotic(PC pc,PCExoticType type)
642 {
643   PC_MG     *mg  = (PC_MG*)pc->data;
644   PC_Exotic *ctx = (PC_Exotic*) mg->innerctx;
645 
646   PetscFunctionBegin;
647   ctx->type = type;
648   PetscFunctionReturn(0);
649 }
650 
651 PetscErrorCode PCSetUp_Exotic(PC pc)
652 {
653   PetscErrorCode ierr;
654   Mat            A;
655   PC_MG          *mg   = (PC_MG*)pc->data;
656   PC_Exotic      *ex   = (PC_Exotic*) mg->innerctx;
657   MatReuse       reuse = (ex->P) ? MAT_REUSE_MATRIX : MAT_INITIAL_MATRIX;
658 
659   PetscFunctionBegin;
660   if (!pc->dm) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Need to call PCSetDM() before using this PC");
661   ierr = PCGetOperators(pc,NULL,&A);CHKERRQ(ierr);
662   if (ex->type == PC_EXOTIC_FACE) {
663     ierr = DMDAGetFaceInterpolation(pc->dm,ex,A,reuse,&ex->P);CHKERRQ(ierr);
664   } else if (ex->type == PC_EXOTIC_WIREBASKET) {
665     ierr = DMDAGetWireBasketInterpolation(pc->dm,ex,A,reuse,&ex->P);CHKERRQ(ierr);
666   } else SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_PLIB,"Unknown exotic coarse space %d",ex->type);
667   ierr = PCMGSetInterpolation(pc,1,ex->P);CHKERRQ(ierr);
668   /* if PC has attached DM we must remove it or the PCMG will use it to compute incorrect sized vectors and interpolations */
669   ierr = PCSetDM(pc,NULL);CHKERRQ(ierr);
670   ierr = PCSetUp_MG(pc);CHKERRQ(ierr);
671   PetscFunctionReturn(0);
672 }
673 
674 PetscErrorCode PCDestroy_Exotic(PC pc)
675 {
676   PetscErrorCode ierr;
677   PC_MG          *mg  = (PC_MG*)pc->data;
678   PC_Exotic      *ctx = (PC_Exotic*) mg->innerctx;
679 
680   PetscFunctionBegin;
681   ierr = MatDestroy(&ctx->P);CHKERRQ(ierr);
682   ierr = KSPDestroy(&ctx->ksp);CHKERRQ(ierr);
683   ierr = PetscFree(ctx);CHKERRQ(ierr);
684   ierr = PCDestroy_MG(pc);CHKERRQ(ierr);
685   PetscFunctionReturn(0);
686 }
687 
688 PetscErrorCode PCView_Exotic(PC pc,PetscViewer viewer)
689 {
690   PC_MG          *mg = (PC_MG*)pc->data;
691   PetscErrorCode ierr;
692   PetscBool      iascii;
693   PC_Exotic      *ctx = (PC_Exotic*) mg->innerctx;
694 
695   PetscFunctionBegin;
696   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
697   if (iascii) {
698     ierr = PetscViewerASCIIPrintf(viewer,"    Exotic type = %s\n",PCExoticTypes[ctx->type]);CHKERRQ(ierr);
699     if (ctx->directSolve) {
700       ierr = PetscViewerASCIIPrintf(viewer,"      Using direct solver to construct interpolation\n");CHKERRQ(ierr);
701     } else {
702       PetscViewer sviewer;
703       PetscMPIInt rank;
704 
705       ierr = PetscViewerASCIIPrintf(viewer,"      Using iterative solver to construct interpolation\n");CHKERRQ(ierr);
706       ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
707       ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);  /* should not need to push this twice? */
708       ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
709       ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
710       if (!rank) {
711         ierr = KSPView(ctx->ksp,sviewer);CHKERRQ(ierr);
712       }
713       ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
714       ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
715       ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
716     }
717   }
718   ierr = PCView_MG(pc,viewer);CHKERRQ(ierr);
719   PetscFunctionReturn(0);
720 }
721 
722 PetscErrorCode PCSetFromOptions_Exotic(PetscOptionItems *PetscOptionsObject,PC pc)
723 {
724   PetscErrorCode ierr;
725   PetscBool      flg;
726   PC_MG          *mg = (PC_MG*)pc->data;
727   PCExoticType   mgctype;
728   PC_Exotic      *ctx = (PC_Exotic*) mg->innerctx;
729 
730   PetscFunctionBegin;
731   ierr = PetscOptionsHead(PetscOptionsObject,"Exotic coarse space options");CHKERRQ(ierr);
732   ierr = PetscOptionsEnum("-pc_exotic_type","face or wirebasket","PCExoticSetType",PCExoticTypes,(PetscEnum)ctx->type,(PetscEnum*)&mgctype,&flg);CHKERRQ(ierr);
733   if (flg) {
734     ierr = PCExoticSetType(pc,mgctype);CHKERRQ(ierr);
735   }
736   ierr = PetscOptionsBool("-pc_exotic_direct_solver","use direct solver to construct interpolation","None",ctx->directSolve,&ctx->directSolve,NULL);CHKERRQ(ierr);
737   if (!ctx->directSolve) {
738     if (!ctx->ksp) {
739       const char *prefix;
740       ierr = KSPCreate(PETSC_COMM_SELF,&ctx->ksp);CHKERRQ(ierr);
741       ierr = KSPSetErrorIfNotConverged(ctx->ksp,pc->erroriffailure);CHKERRQ(ierr);
742       ierr = PetscObjectIncrementTabLevel((PetscObject)ctx->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
743       ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)ctx->ksp);CHKERRQ(ierr);
744       ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
745       ierr = KSPSetOptionsPrefix(ctx->ksp,prefix);CHKERRQ(ierr);
746       ierr = KSPAppendOptionsPrefix(ctx->ksp,"exotic_");CHKERRQ(ierr);
747     }
748     ierr = KSPSetFromOptions(ctx->ksp);CHKERRQ(ierr);
749   }
750   ierr = PetscOptionsTail();CHKERRQ(ierr);
751   PetscFunctionReturn(0);
752 }
753 
754 
755 /*MC
756      PCEXOTIC - Two level overlapping Schwarz preconditioner with exotic (non-standard) coarse grid spaces
757 
758      This uses the PCMG infrastructure restricted to two levels and the face and wirebasket based coarse
759    grid spaces.
760 
761    Notes:
762     By default this uses GMRES on the fine grid smoother so this should be used with KSPFGMRES or the smoother changed to not use GMRES
763 
764    References:
+  1. - These coarse grid spaces originate in the work of Bramble, Pasciak and Schatz, "The Construction
   of Preconditioners for Elliptic Problems by Substructuring IV", Mathematics of Computation, volume 53, 1989.
767 .  2. - They were generalized slightly in "Domain Decomposition Method for Linear Elasticity", Ph. D. thesis, Barry Smith,
768    New York University, 1990.
769 .  3. - They were then explored in great detail in Dryja, Smith, Widlund, "Schwarz Analysis
770    of Iterative Substructuring Methods for Elliptic Problems in Three Dimensions, SIAM Journal on Numerical
771    Analysis, volume 31. 1994. These were developed in the context of iterative substructuring preconditioners.
772 .  4. - They were then ingeniously applied as coarse grid spaces for overlapping Schwarz methods by Dohrmann and Widlund.
773    They refer to them as GDSW (generalized Dryja, Smith, Widlund preconditioners). See, for example,
774    Clark R. Dohrmann, Axel Klawonn, and Olof B. Widlund. Extending theory for domain decomposition algorithms to irregular subdomains. In Ulrich Langer, Marco
775    Discacciati, David Keyes, Olof Widlund, and Walter Zulehner, editors, Proceedings
776    of the 17th International Conference on Domain Decomposition Methods in
777    Science and Engineering, held in Strobl, Austria, 2006, number 60 in
778    Springer Verlag, Lecture Notes in Computational Science and Engineering, 2007.
779 .  5. -  Clark R. Dohrmann, Axel Klawonn, and Olof B. Widlund. A family of energy minimizing coarse spaces for overlapping Schwarz preconditioners. In Ulrich Langer,
780    Marco Discacciati, David Keyes, Olof Widlund, and Walter Zulehner, editors, Proceedings
781    of the 17th International Conference on Domain Decomposition Methods
782    in Science and Engineering, held in Strobl, Austria, 2006, number 60 in
783    Springer Verlag, Lecture Notes in Computational Science and Engineering, 2007
784 .  6. - Clark R. Dohrmann, Axel Klawonn, and Olof B. Widlund. Domain decomposition
785    for less regular subdomains: Overlapping Schwarz in two dimensions. SIAM J.
786    Numer. Anal., 46(4), 2008.
-  7. - Clark R. Dohrmann and Olof B. Widlund. An overlapping Schwarz
   algorithm for almost incompressible elasticity. Technical Report
   TR2008-912, Department of Computer Science, Courant Institute
   of Mathematical Sciences, New York University, May 2008.
791 
792    Options Database: The usual PCMG options are supported, such as -mg_levels_pc_type <type> -mg_coarse_pc_type <type>
793       -pc_mg_type <type>
794 
795    Level: advanced
796 
797 .seealso:  PCMG, PCSetDM(), PCExoticType, PCExoticSetType()
798 M*/
799 
800 PETSC_EXTERN PetscErrorCode PCCreate_Exotic(PC pc)
801 {
802   PetscErrorCode ierr;
803   PC_Exotic      *ex;
804   PC_MG          *mg;
805 
806   PetscFunctionBegin;
807   /* if type was previously mg; must manually destroy it because call to PCSetType(pc,PCMG) will not destroy it */
808   if (pc->ops->destroy) {
809     ierr =  (*pc->ops->destroy)(pc);CHKERRQ(ierr);
810     pc->data = 0;
811   }
812   ierr = PetscFree(((PetscObject)pc)->type_name);CHKERRQ(ierr);
813   ((PetscObject)pc)->type_name = 0;
814 
815   ierr         = PCSetType(pc,PCMG);CHKERRQ(ierr);
816   ierr         = PCMGSetLevels(pc,2,NULL);CHKERRQ(ierr);
817   ierr         = PCMGSetGalerkin(pc,PC_MG_GALERKIN_PMAT);CHKERRQ(ierr);
818   ierr         = PetscNew(&ex);CHKERRQ(ierr); \
819   ex->type     = PC_EXOTIC_FACE;
820   mg           = (PC_MG*) pc->data;
821   mg->innerctx = ex;
822 
823 
824   pc->ops->setfromoptions = PCSetFromOptions_Exotic;
825   pc->ops->view           = PCView_Exotic;
826   pc->ops->destroy        = PCDestroy_Exotic;
827   pc->ops->setup          = PCSetUp_Exotic;
828 
829   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCExoticSetType_C",PCExoticSetType_Exotic);CHKERRQ(ierr);
830   PetscFunctionReturn(0);
831 }
832