xref: /petsc/src/ksp/pc/impls/wb/wb.c (revision b4876bcb73c673da1466dabf4fdfd52d4f02e063)
1 
2 #include <petscdmda.h>   /*I "petscdmda.h" I*/
3 #include <../src/ksp/pc/impls/mg/mgimpl.h>   /*I "petscksp.h" I*/
4 
typedef struct {
  PCExoticType type;         /* which coarse space: PC_EXOTIC_FACE or PC_EXOTIC_WIREBASKET */
  Mat          P;            /* the constructed interpolation matrix */
  PetscBool    directSolve;  /* use direct LU factorization to construct interpolation */
  KSP          ksp;          /* iterative solver used for the interior solves when !directSolve */
} PC_Exotic;
11 
12 const char *const PCExoticTypes[] = {"face","wirebasket","PCExoticType","PC_Exotic",0};
13 
14 
15 #undef __FUNCT__
16 #define __FUNCT__ "DMDAGetWireBasketInterpolation"
17 /*
18       DMDAGetWireBasketInterpolation - Gets the interpolation for a wirebasket based coarse space
19 
20 */
21 PetscErrorCode DMDAGetWireBasketInterpolation(DM da,PC_Exotic *exotic,Mat Aglobal,MatReuse reuse,Mat *P)
22 {
23   PetscErrorCode         ierr;
24   PetscInt               dim,i,j,k,m,n,p,dof,Nint,Nface,Nwire,Nsurf,*Iint,*Isurf,cint = 0,csurf = 0,istart,jstart,kstart,*II,N,c = 0;
25   PetscInt               mwidth,nwidth,pwidth,cnt,mp,np,pp,Ntotal,gl[26],*globals,Ng,*IIint,*IIsurf,Nt;
26   Mat                    Xint, Xsurf,Xint_tmp;
27   IS                     isint,issurf,is,row,col;
28   ISLocalToGlobalMapping ltg;
29   MPI_Comm               comm;
30   Mat                    A,Aii,Ais,Asi,*Aholder,iAii;
31   MatFactorInfo          info;
32   PetscScalar            *xsurf,*xint;
33 #if defined(PETSC_USE_DEBUG_foo)
34   PetscScalar            tmp;
35 #endif
36   PetscTable             ht;
37 
38   PetscFunctionBegin;
39   ierr = DMDAGetInfo(da,&dim,0,0,0,&mp,&np,&pp,&dof,0,0,0,0,0);CHKERRQ(ierr);
40   if (dof != 1) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Only for single field problems");
41   if (dim != 3) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Only coded for 3d problems");
42   ierr   = DMDAGetCorners(da,0,0,0,&m,&n,&p);CHKERRQ(ierr);
43   ierr   = DMDAGetGhostCorners(da,&istart,&jstart,&kstart,&mwidth,&nwidth,&pwidth);CHKERRQ(ierr);
44   istart = istart ? -1 : 0;
45   jstart = jstart ? -1 : 0;
46   kstart = kstart ? -1 : 0;
47 
48   /*
49     the columns of P are the interpolation of each coarse grid point (one for each vertex and edge)
50     to all the local degrees of freedom (this includes the vertices, edges and faces).
51 
52     Xint are the subset of the interpolation into the interior
53 
54     Xface are the interpolation onto faces but not into the interior
55 
56     Xsurf are the interpolation onto the vertices and edges (the surfbasket)
57                                       Xint
58     Symbolically one could write P = (Xface) after interchanging the rows to match the natural ordering on the domain
59                                       Xsurf
60   */
61   N     = (m - istart)*(n - jstart)*(p - kstart);
62   Nint  = (m-2-istart)*(n-2-jstart)*(p-2-kstart);
63   Nface = 2*((m-2-istart)*(n-2-jstart) + (m-2-istart)*(p-2-kstart) + (n-2-jstart)*(p-2-kstart));
64   Nwire = 4*((m-2-istart) + (n-2-jstart) + (p-2-kstart)) + 8;
65   Nsurf = Nface + Nwire;
66   ierr  = MatCreateSeqDense(MPI_COMM_SELF,Nint,26,NULL,&Xint);CHKERRQ(ierr);
67   ierr  = MatCreateSeqDense(MPI_COMM_SELF,Nsurf,26,NULL,&Xsurf);CHKERRQ(ierr);
68   ierr  = MatDenseGetArray(Xsurf,&xsurf);CHKERRQ(ierr);
69 
70   /*
71      Require that all 12 edges and 6 faces have at least one grid point. Otherwise some of the columns of
72      Xsurf will be all zero (thus making the coarse matrix singular).
73   */
74   if (m-istart < 3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of grid points per process in X direction must be at least 3");
75   if (n-jstart < 3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of grid points per process in Y direction must be at least 3");
76   if (p-kstart < 3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of grid points per process in Z direction must be at least 3");
77 
78   cnt = 0;
79 
80   xsurf[cnt++] = 1;
81   for (i=1; i<m-istart-1; i++) xsurf[cnt++ + Nsurf] = 1;
82   xsurf[cnt++ + 2*Nsurf] = 1;
83 
84   for (j=1; j<n-1-jstart; j++) {
85     xsurf[cnt++ + 3*Nsurf] = 1;
86     for (i=1; i<m-istart-1; i++) xsurf[cnt++ + 4*Nsurf] = 1;
87     xsurf[cnt++ + 5*Nsurf] = 1;
88   }
89 
90   xsurf[cnt++ + 6*Nsurf] = 1;
91   for (i=1; i<m-istart-1; i++) xsurf[cnt++ + 7*Nsurf] = 1;
92   xsurf[cnt++ + 8*Nsurf] = 1;
93 
94   for (k=1; k<p-1-kstart; k++) {
95     xsurf[cnt++ + 9*Nsurf] = 1;
96     for (i=1; i<m-istart-1; i++) xsurf[cnt++ + 10*Nsurf] = 1;
97     xsurf[cnt++ + 11*Nsurf] = 1;
98 
99     for (j=1; j<n-1-jstart; j++) {
100       xsurf[cnt++ + 12*Nsurf] = 1;
101       /* these are the interior nodes */
102       xsurf[cnt++ + 13*Nsurf] = 1;
103     }
104 
105     xsurf[cnt++ + 14*Nsurf] = 1;
106     for (i=1; i<m-istart-1; i++) xsurf[cnt++ + 15*Nsurf] = 1;
107     xsurf[cnt++ + 16*Nsurf] = 1;
108   }
109 
110   xsurf[cnt++ + 17*Nsurf] = 1;
111   for (i=1; i<m-istart-1; i++) xsurf[cnt++ + 18*Nsurf] = 1;
112   xsurf[cnt++ + 19*Nsurf] = 1;
113 
114   for (j=1;j<n-1-jstart;j++) {
115     xsurf[cnt++ + 20*Nsurf] = 1;
116     for (i=1; i<m-istart-1; i++) xsurf[cnt++ + 21*Nsurf] = 1;
117     xsurf[cnt++ + 22*Nsurf] = 1;
118   }
119 
120   xsurf[cnt++ + 23*Nsurf] = 1;
121   for (i=1; i<m-istart-1; i++) xsurf[cnt++ + 24*Nsurf] = 1;
122   xsurf[cnt++ + 25*Nsurf] = 1;
123 
124 
125   /* interpolations only sum to 1 when using direct solver */
126 #if defined(PETSC_USE_DEBUG_foo)
127   for (i=0; i<Nsurf; i++) {
128     tmp = 0.0;
129     for (j=0; j<26; j++) tmp += xsurf[i+j*Nsurf];
130     if (PetscAbsScalar(tmp-1.0) > 1.e-10) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Wrong Xsurf interpolation at i %D value %G",i,PetscAbsScalar(tmp));
131   }
132 #endif
133   ierr = MatDenseRestoreArray(Xsurf,&xsurf);CHKERRQ(ierr);
134   /* ierr = MatView(Xsurf,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);*/
135 
136 
137   /*
138        I are the indices for all the needed vertices (in global numbering)
139        Iint are the indices for the interior values, I surf for the surface values
140             (This is just for the part of the global matrix obtained with MatGetSubMatrix(), it
141              is NOT the local DMDA ordering.)
142        IIint and IIsurf are the same as the Iint, Isurf except they are in the global numbering
143   */
144 #define Endpoint(a,start,b) (a == 0 || a == (b-1-start))
145   ierr = PetscMalloc3(N,PetscInt,&II,Nint,PetscInt,&Iint,Nsurf,PetscInt,&Isurf);CHKERRQ(ierr);
146   ierr = PetscMalloc2(Nint,PetscInt,&IIint,Nsurf,PetscInt,&IIsurf);CHKERRQ(ierr);
147   for (k=0; k<p-kstart; k++) {
148     for (j=0; j<n-jstart; j++) {
149       for (i=0; i<m-istart; i++) {
150         II[c++] = i + j*mwidth + k*mwidth*nwidth;
151 
152         if (!Endpoint(i,istart,m) && !Endpoint(j,jstart,n) && !Endpoint(k,kstart,p)) {
153           IIint[cint]  = i + j*mwidth + k*mwidth*nwidth;
154           Iint[cint++] = i + j*(m-istart) + k*(m-istart)*(n-jstart);
155         } else {
156           IIsurf[csurf]  = i + j*mwidth + k*mwidth*nwidth;
157           Isurf[csurf++] = i + j*(m-istart) + k*(m-istart)*(n-jstart);
158         }
159       }
160     }
161   }
162   if (c != N) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"c != N");
163   if (cint != Nint) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"cint != Nint");
164   if (csurf != Nsurf) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"csurf != Nsurf");
165   ierr = DMGetLocalToGlobalMapping(da,&ltg);CHKERRQ(ierr);
166   ierr = ISLocalToGlobalMappingApply(ltg,N,II,II);CHKERRQ(ierr);
167   ierr = ISLocalToGlobalMappingApply(ltg,Nint,IIint,IIint);CHKERRQ(ierr);
168   ierr = ISLocalToGlobalMappingApply(ltg,Nsurf,IIsurf,IIsurf);CHKERRQ(ierr);
169   ierr = PetscObjectGetComm((PetscObject)da,&comm);CHKERRQ(ierr);
170   ierr = ISCreateGeneral(comm,N,II,PETSC_COPY_VALUES,&is);CHKERRQ(ierr);
171   ierr = ISCreateGeneral(PETSC_COMM_SELF,Nint,Iint,PETSC_COPY_VALUES,&isint);CHKERRQ(ierr);
172   ierr = ISCreateGeneral(PETSC_COMM_SELF,Nsurf,Isurf,PETSC_COPY_VALUES,&issurf);CHKERRQ(ierr);
173   ierr = PetscFree3(II,Iint,Isurf);CHKERRQ(ierr);
174 
175   ierr = MatGetSubMatrices(Aglobal,1,&is,&is,MAT_INITIAL_MATRIX,&Aholder);CHKERRQ(ierr);
176   A    = *Aholder;
177   ierr = PetscFree(Aholder);CHKERRQ(ierr);
178 
179   ierr = MatGetSubMatrix(A,isint,isint,MAT_INITIAL_MATRIX,&Aii);CHKERRQ(ierr);
180   ierr = MatGetSubMatrix(A,isint,issurf,MAT_INITIAL_MATRIX,&Ais);CHKERRQ(ierr);
181   ierr = MatGetSubMatrix(A,issurf,isint,MAT_INITIAL_MATRIX,&Asi);CHKERRQ(ierr);
182 
183   /*
184      Solve for the interpolation onto the interior Xint
185   */
186   ierr = MatMatMult(Ais,Xsurf,MAT_INITIAL_MATRIX,PETSC_DETERMINE,&Xint_tmp);CHKERRQ(ierr);
187   ierr = MatScale(Xint_tmp,-1.0);CHKERRQ(ierr);
188   if (exotic->directSolve) {
189     ierr = MatGetFactor(Aii,MATSOLVERPETSC,MAT_FACTOR_LU,&iAii);CHKERRQ(ierr);
190     ierr = MatFactorInfoInitialize(&info);CHKERRQ(ierr);
191     ierr = MatGetOrdering(Aii,MATORDERINGND,&row,&col);CHKERRQ(ierr);
192     ierr = MatLUFactorSymbolic(iAii,Aii,row,col,&info);CHKERRQ(ierr);
193     ierr = ISDestroy(&row);CHKERRQ(ierr);
194     ierr = ISDestroy(&col);CHKERRQ(ierr);
195     ierr = MatLUFactorNumeric(iAii,Aii,&info);CHKERRQ(ierr);
196     ierr = MatMatSolve(iAii,Xint_tmp,Xint);CHKERRQ(ierr);
197     ierr = MatDestroy(&iAii);CHKERRQ(ierr);
198   } else {
199     Vec         b,x;
200     PetscScalar *xint_tmp;
201 
202     ierr = MatDenseGetArray(Xint,&xint);CHKERRQ(ierr);
203     ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,Nint,0,&x);CHKERRQ(ierr);
204     ierr = MatDenseGetArray(Xint_tmp,&xint_tmp);CHKERRQ(ierr);
205     ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,Nint,0,&b);CHKERRQ(ierr);
206     ierr = KSPSetOperators(exotic->ksp,Aii,Aii,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
207     for (i=0; i<26; i++) {
208       ierr = VecPlaceArray(x,xint+i*Nint);CHKERRQ(ierr);
209       ierr = VecPlaceArray(b,xint_tmp+i*Nint);CHKERRQ(ierr);
210       ierr = KSPSolve(exotic->ksp,b,x);CHKERRQ(ierr);
211       ierr = VecResetArray(x);CHKERRQ(ierr);
212       ierr = VecResetArray(b);CHKERRQ(ierr);
213     }
214     ierr = MatDenseRestoreArray(Xint,&xint);CHKERRQ(ierr);
215     ierr = MatDenseRestoreArray(Xint_tmp,&xint_tmp);CHKERRQ(ierr);
216     ierr = VecDestroy(&x);CHKERRQ(ierr);
217     ierr = VecDestroy(&b);CHKERRQ(ierr);
218   }
219   ierr = MatDestroy(&Xint_tmp);CHKERRQ(ierr);
220 
221 #if defined(PETSC_USE_DEBUG_foo)
222   ierr = MatDenseGetArray(Xint,&xint);CHKERRQ(ierr);
223   for (i=0; i<Nint; i++) {
224     tmp = 0.0;
225     for (j=0; j<26; j++) tmp += xint[i+j*Nint];
226 
227     if (PetscAbsScalar(tmp-1.0) > 1.e-10) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Wrong Xint interpolation at i %D value %G",i,PetscAbsScalar(tmp));
228   }
229   ierr = MatDenseRestoreArray(Xint,&xint);CHKERRQ(ierr);
230   /* ierr =MatView(Xint,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr); */
231 #endif
232 
233 
234   /*         total vertices             total faces                                  total edges */
235   Ntotal = (mp + 1)*(np + 1)*(pp + 1) + mp*np*(pp+1) + mp*pp*(np+1) + np*pp*(mp+1) + mp*(np+1)*(pp+1) + np*(mp+1)*(pp+1) +  pp*(mp+1)*(np+1);
236 
237   /*
238       For each vertex, edge, face on process (in the same orderings as used above) determine its local number including ghost points
239   */
240   cnt = 0;
241 
242   gl[cnt++] = 0;  { gl[cnt++] = 1;} gl[cnt++] = m-istart-1;
243   { gl[cnt++] = mwidth;  { gl[cnt++] = mwidth+1;} gl[cnt++] = mwidth + m-istart-1;}
244   gl[cnt++] = mwidth*(n-jstart-1);  { gl[cnt++] = mwidth*(n-jstart-1)+1;} gl[cnt++] = mwidth*(n-jstart-1) + m-istart-1;
245   {
246     gl[cnt++] = mwidth*nwidth;  { gl[cnt++] = mwidth*nwidth+1;}  gl[cnt++] = mwidth*nwidth+ m-istart-1;
247     { gl[cnt++] = mwidth*nwidth + mwidth; /* these are the interior nodes */ gl[cnt++] = mwidth*nwidth + mwidth+m-istart-1;}
248     gl[cnt++] = mwidth*nwidth+ mwidth*(n-jstart-1);   { gl[cnt++] = mwidth*nwidth+mwidth*(n-jstart-1)+1;} gl[cnt++] = mwidth*nwidth+mwidth*(n-jstart-1) + m-istart-1;
249   }
250   gl[cnt++] = mwidth*nwidth*(p-kstart-1); { gl[cnt++] = mwidth*nwidth*(p-kstart-1)+1;} gl[cnt++] = mwidth*nwidth*(p-kstart-1) +  m-istart-1;
251   { gl[cnt++] = mwidth*nwidth*(p-kstart-1) + mwidth;   { gl[cnt++] = mwidth*nwidth*(p-kstart-1) + mwidth+1;} gl[cnt++] = mwidth*nwidth*(p-kstart-1)+mwidth+m-istart-1;}
252   gl[cnt++] = mwidth*nwidth*(p-kstart-1) +  mwidth*(n-jstart-1);  { gl[cnt++] = mwidth*nwidth*(p-kstart-1)+ mwidth*(n-jstart-1)+1;} gl[cnt++] = mwidth*nwidth*(p-kstart-1) + mwidth*(n-jstart-1) + m-istart-1;
253 
254   /* PetscIntView(26,gl,PETSC_VIEWER_STDOUT_WORLD); */
255   /* convert that to global numbering and get them on all processes */
256   ierr = ISLocalToGlobalMappingApply(ltg,26,gl,gl);CHKERRQ(ierr);
257   /* PetscIntView(26,gl,PETSC_VIEWER_STDOUT_WORLD); */
258   ierr = PetscMalloc(26*mp*np*pp*sizeof(PetscInt),&globals);CHKERRQ(ierr);
259   ierr = MPI_Allgather(gl,26,MPIU_INT,globals,26,MPIU_INT,PetscObjectComm((PetscObject)da));CHKERRQ(ierr);
260 
261   /* Number the coarse grid points from 0 to Ntotal */
262   ierr = MatGetSize(Aglobal,&Nt,NULL);CHKERRQ(ierr);
263   ierr = PetscTableCreate(Ntotal/3,Nt+1,&ht);CHKERRQ(ierr);
264   for (i=0; i<26*mp*np*pp; i++) {
265     ierr = PetscTableAddCount(ht,globals[i]+1);CHKERRQ(ierr);
266   }
267   ierr = PetscTableGetCount(ht,&cnt);CHKERRQ(ierr);
268   if (cnt != Ntotal) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Hash table size %D not equal to total number coarse grid points %D",cnt,Ntotal);
269   ierr = PetscFree(globals);CHKERRQ(ierr);
270   for (i=0; i<26; i++) {
271     ierr = PetscTableFind(ht,gl[i]+1,&gl[i]);CHKERRQ(ierr);
272     gl[i]--;
273   }
274   ierr = PetscTableDestroy(&ht);CHKERRQ(ierr);
275   /* PetscIntView(26,gl,PETSC_VIEWER_STDOUT_WORLD); */
276 
277   /* construct global interpolation matrix */
278   ierr = MatGetLocalSize(Aglobal,&Ng,NULL);CHKERRQ(ierr);
279   if (reuse == MAT_INITIAL_MATRIX) {
280     ierr = MatCreateAIJ(PetscObjectComm((PetscObject)da),Ng,PETSC_DECIDE,PETSC_DECIDE,Ntotal,Nint+Nsurf,NULL,Nint+Nsurf,NULL,P);CHKERRQ(ierr);
281   } else {
282     ierr = MatZeroEntries(*P);CHKERRQ(ierr);
283   }
284   ierr = MatSetOption(*P,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr);
285   ierr = MatDenseGetArray(Xint,&xint);CHKERRQ(ierr);
286   ierr = MatSetValues(*P,Nint,IIint,26,gl,xint,INSERT_VALUES);CHKERRQ(ierr);
287   ierr = MatDenseRestoreArray(Xint,&xint);CHKERRQ(ierr);
288   ierr = MatDenseGetArray(Xsurf,&xsurf);CHKERRQ(ierr);
289   ierr = MatSetValues(*P,Nsurf,IIsurf,26,gl,xsurf,INSERT_VALUES);CHKERRQ(ierr);
290   ierr = MatDenseRestoreArray(Xsurf,&xsurf);CHKERRQ(ierr);
291   ierr = MatAssemblyBegin(*P,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
292   ierr = MatAssemblyEnd(*P,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
293   ierr = PetscFree2(IIint,IIsurf);CHKERRQ(ierr);
294 
295 #if defined(PETSC_USE_DEBUG_foo)
296   {
297     Vec         x,y;
298     PetscScalar *yy;
299     ierr = VecCreateMPI(PetscObjectComm((PetscObject)da),Ng,PETSC_DETERMINE,&y);CHKERRQ(ierr);
300     ierr = VecCreateMPI(PetscObjectComm((PetscObject)da),PETSC_DETERMINE,Ntotal,&x);CHKERRQ(ierr);
301     ierr = VecSet(x,1.0);CHKERRQ(ierr);
302     ierr = MatMult(*P,x,y);CHKERRQ(ierr);
303     ierr = VecGetArray(y,&yy);CHKERRQ(ierr);
304     for (i=0; i<Ng; i++) {
305       if (PetscAbsScalar(yy[i]-1.0) > 1.e-10) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Wrong p interpolation at i %D value %G",i,PetscAbsScalar(yy[i]));
306     }
307     ierr = VecRestoreArray(y,&yy);CHKERRQ(ierr);
308     ierr = VecDestroy(x);CHKERRQ(ierr);
309     ierr = VecDestroy(y);CHKERRQ(ierr);
310   }
311 #endif
312 
313   ierr = MatDestroy(&Aii);CHKERRQ(ierr);
314   ierr = MatDestroy(&Ais);CHKERRQ(ierr);
315   ierr = MatDestroy(&Asi);CHKERRQ(ierr);
316   ierr = MatDestroy(&A);CHKERRQ(ierr);
317   ierr = ISDestroy(&is);CHKERRQ(ierr);
318   ierr = ISDestroy(&isint);CHKERRQ(ierr);
319   ierr = ISDestroy(&issurf);CHKERRQ(ierr);
320   ierr = MatDestroy(&Xint);CHKERRQ(ierr);
321   ierr = MatDestroy(&Xsurf);CHKERRQ(ierr);
322   PetscFunctionReturn(0);
323 }
324 
325 #undef __FUNCT__
326 #define __FUNCT__ "DMDAGetFaceInterpolation"
327 /*
328       DMDAGetFaceInterpolation - Gets the interpolation for a face based coarse space
329 
330 */
331 PetscErrorCode DMDAGetFaceInterpolation(DM da,PC_Exotic *exotic,Mat Aglobal,MatReuse reuse,Mat *P)
332 {
333   PetscErrorCode         ierr;
334   PetscInt               dim,i,j,k,m,n,p,dof,Nint,Nface,Nwire,Nsurf,*Iint,*Isurf,cint = 0,csurf = 0,istart,jstart,kstart,*II,N,c = 0;
335   PetscInt               mwidth,nwidth,pwidth,cnt,mp,np,pp,Ntotal,gl[6],*globals,Ng,*IIint,*IIsurf,Nt;
336   Mat                    Xint, Xsurf,Xint_tmp;
337   IS                     isint,issurf,is,row,col;
338   ISLocalToGlobalMapping ltg;
339   MPI_Comm               comm;
340   Mat                    A,Aii,Ais,Asi,*Aholder,iAii;
341   MatFactorInfo          info;
342   PetscScalar            *xsurf,*xint;
343 #if defined(PETSC_USE_DEBUG_foo)
344   PetscScalar            tmp;
345 #endif
346   PetscTable             ht;
347 
348   PetscFunctionBegin;
349   ierr = DMDAGetInfo(da,&dim,0,0,0,&mp,&np,&pp,&dof,0,0,0,0,0);CHKERRQ(ierr);
350   if (dof != 1) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Only for single field problems");
351   if (dim != 3) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Only coded for 3d problems");
352   ierr   = DMDAGetCorners(da,0,0,0,&m,&n,&p);CHKERRQ(ierr);
353   ierr   = DMDAGetGhostCorners(da,&istart,&jstart,&kstart,&mwidth,&nwidth,&pwidth);CHKERRQ(ierr);
354   istart = istart ? -1 : 0;
355   jstart = jstart ? -1 : 0;
356   kstart = kstart ? -1 : 0;
357 
358   /*
359     the columns of P are the interpolation of each coarse grid point (one for each vertex and edge)
360     to all the local degrees of freedom (this includes the vertices, edges and faces).
361 
362     Xint are the subset of the interpolation into the interior
363 
364     Xface are the interpolation onto faces but not into the interior
365 
366     Xsurf are the interpolation onto the vertices and edges (the surfbasket)
367                                       Xint
368     Symbolically one could write P = (Xface) after interchanging the rows to match the natural ordering on the domain
369                                       Xsurf
370   */
371   N     = (m - istart)*(n - jstart)*(p - kstart);
372   Nint  = (m-2-istart)*(n-2-jstart)*(p-2-kstart);
373   Nface = 2*((m-2-istart)*(n-2-jstart) + (m-2-istart)*(p-2-kstart) + (n-2-jstart)*(p-2-kstart));
374   Nwire = 4*((m-2-istart) + (n-2-jstart) + (p-2-kstart)) + 8;
375   Nsurf = Nface + Nwire;
376   ierr  = MatCreateSeqDense(MPI_COMM_SELF,Nint,6,NULL,&Xint);CHKERRQ(ierr);
377   ierr  = MatCreateSeqDense(MPI_COMM_SELF,Nsurf,6,NULL,&Xsurf);CHKERRQ(ierr);
378   ierr  = MatDenseGetArray(Xsurf,&xsurf);CHKERRQ(ierr);
379 
380   /*
381      Require that all 12 edges and 6 faces have at least one grid point. Otherwise some of the columns of
382      Xsurf will be all zero (thus making the coarse matrix singular).
383   */
384   if (m-istart < 3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of grid points per process in X direction must be at least 3");
385   if (n-jstart < 3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of grid points per process in Y direction must be at least 3");
386   if (p-kstart < 3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of grid points per process in Z direction must be at least 3");
387 
388   cnt = 0;
389   for (j=1; j<n-1-jstart; j++) {
390     for (i=1; i<m-istart-1; i++) xsurf[cnt++ + 0*Nsurf] = 1;
391   }
392 
393   for (k=1; k<p-1-kstart; k++) {
394     for (i=1; i<m-istart-1; i++) xsurf[cnt++ + 1*Nsurf] = 1;
395     for (j=1; j<n-1-jstart; j++) {
396       xsurf[cnt++ + 2*Nsurf] = 1;
397       /* these are the interior nodes */
398       xsurf[cnt++ + 3*Nsurf] = 1;
399     }
400     for (i=1; i<m-istart-1; i++) xsurf[cnt++ + 4*Nsurf] = 1;
401   }
402   for (j=1;j<n-1-jstart;j++) {
403     for (i=1; i<m-istart-1; i++) xsurf[cnt++ + 5*Nsurf] = 1;
404   }
405 
406 #if defined(PETSC_USE_DEBUG_foo)
407   for (i=0; i<Nsurf; i++) {
408     tmp = 0.0;
409     for (j=0; j<6; j++) tmp += xsurf[i+j*Nsurf];
410 
411     if (PetscAbsScalar(tmp-1.0) > 1.e-10) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Wrong Xsurf interpolation at i %D value %G",i,PetscAbsScalar(tmp));
412   }
413 #endif
414   ierr = MatDenseRestoreArray(Xsurf,&xsurf);CHKERRQ(ierr);
415   /* ierr = MatView(Xsurf,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);*/
416 
417 
418   /*
419        I are the indices for all the needed vertices (in global numbering)
420        Iint are the indices for the interior values, I surf for the surface values
421             (This is just for the part of the global matrix obtained with MatGetSubMatrix(), it
422              is NOT the local DMDA ordering.)
423        IIint and IIsurf are the same as the Iint, Isurf except they are in the global numbering
424   */
425 #define Endpoint(a,start,b) (a == 0 || a == (b-1-start))
426   ierr = PetscMalloc3(N,PetscInt,&II,Nint,PetscInt,&Iint,Nsurf,PetscInt,&Isurf);CHKERRQ(ierr);
427   ierr = PetscMalloc2(Nint,PetscInt,&IIint,Nsurf,PetscInt,&IIsurf);CHKERRQ(ierr);
428   for (k=0; k<p-kstart; k++) {
429     for (j=0; j<n-jstart; j++) {
430       for (i=0; i<m-istart; i++) {
431         II[c++] = i + j*mwidth + k*mwidth*nwidth;
432 
433         if (!Endpoint(i,istart,m) && !Endpoint(j,jstart,n) && !Endpoint(k,kstart,p)) {
434           IIint[cint]  = i + j*mwidth + k*mwidth*nwidth;
435           Iint[cint++] = i + j*(m-istart) + k*(m-istart)*(n-jstart);
436         } else {
437           IIsurf[csurf]  = i + j*mwidth + k*mwidth*nwidth;
438           Isurf[csurf++] = i + j*(m-istart) + k*(m-istart)*(n-jstart);
439         }
440       }
441     }
442   }
443   if (c != N) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"c != N");
444   if (cint != Nint) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"cint != Nint");
445   if (csurf != Nsurf) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"csurf != Nsurf");
446   ierr = DMGetLocalToGlobalMapping(da,&ltg);CHKERRQ(ierr);
447   ierr = ISLocalToGlobalMappingApply(ltg,N,II,II);CHKERRQ(ierr);
448   ierr = ISLocalToGlobalMappingApply(ltg,Nint,IIint,IIint);CHKERRQ(ierr);
449   ierr = ISLocalToGlobalMappingApply(ltg,Nsurf,IIsurf,IIsurf);CHKERRQ(ierr);
450   ierr = PetscObjectGetComm((PetscObject)da,&comm);CHKERRQ(ierr);
451   ierr = ISCreateGeneral(comm,N,II,PETSC_COPY_VALUES,&is);CHKERRQ(ierr);
452   ierr = ISCreateGeneral(PETSC_COMM_SELF,Nint,Iint,PETSC_COPY_VALUES,&isint);CHKERRQ(ierr);
453   ierr = ISCreateGeneral(PETSC_COMM_SELF,Nsurf,Isurf,PETSC_COPY_VALUES,&issurf);CHKERRQ(ierr);
454   ierr = PetscFree3(II,Iint,Isurf);CHKERRQ(ierr);
455 
456   ierr = ISSort(is);CHKERRQ(ierr);
457   ierr = MatGetSubMatrices(Aglobal,1,&is,&is,MAT_INITIAL_MATRIX,&Aholder);CHKERRQ(ierr);
458   A    = *Aholder;
459   ierr = PetscFree(Aholder);CHKERRQ(ierr);
460 
461   ierr = MatGetSubMatrix(A,isint,isint,MAT_INITIAL_MATRIX,&Aii);CHKERRQ(ierr);
462   ierr = MatGetSubMatrix(A,isint,issurf,MAT_INITIAL_MATRIX,&Ais);CHKERRQ(ierr);
463   ierr = MatGetSubMatrix(A,issurf,isint,MAT_INITIAL_MATRIX,&Asi);CHKERRQ(ierr);
464 
465   /*
466      Solve for the interpolation onto the interior Xint
467   */
468   ierr = MatMatMult(Ais,Xsurf,MAT_INITIAL_MATRIX,PETSC_DETERMINE,&Xint_tmp);CHKERRQ(ierr);
469   ierr = MatScale(Xint_tmp,-1.0);CHKERRQ(ierr);
470 
471   if (exotic->directSolve) {
472     ierr = MatGetFactor(Aii,MATSOLVERPETSC,MAT_FACTOR_LU,&iAii);CHKERRQ(ierr);
473     ierr = MatFactorInfoInitialize(&info);CHKERRQ(ierr);
474     ierr = MatGetOrdering(Aii,MATORDERINGND,&row,&col);CHKERRQ(ierr);
475     ierr = MatLUFactorSymbolic(iAii,Aii,row,col,&info);CHKERRQ(ierr);
476     ierr = ISDestroy(&row);CHKERRQ(ierr);
477     ierr = ISDestroy(&col);CHKERRQ(ierr);
478     ierr = MatLUFactorNumeric(iAii,Aii,&info);CHKERRQ(ierr);
479     ierr = MatMatSolve(iAii,Xint_tmp,Xint);CHKERRQ(ierr);
480     ierr = MatDestroy(&iAii);CHKERRQ(ierr);
481   } else {
482     Vec         b,x;
483     PetscScalar *xint_tmp;
484 
485     ierr = MatDenseGetArray(Xint,&xint);CHKERRQ(ierr);
486     ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,Nint,0,&x);CHKERRQ(ierr);
487     ierr = MatDenseGetArray(Xint_tmp,&xint_tmp);CHKERRQ(ierr);
488     ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,Nint,0,&b);CHKERRQ(ierr);
489     ierr = KSPSetOperators(exotic->ksp,Aii,Aii,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
490     for (i=0; i<6; i++) {
491       ierr = VecPlaceArray(x,xint+i*Nint);CHKERRQ(ierr);
492       ierr = VecPlaceArray(b,xint_tmp+i*Nint);CHKERRQ(ierr);
493       ierr = KSPSolve(exotic->ksp,b,x);CHKERRQ(ierr);
494       ierr = VecResetArray(x);CHKERRQ(ierr);
495       ierr = VecResetArray(b);CHKERRQ(ierr);
496     }
497     ierr = MatDenseRestoreArray(Xint,&xint);CHKERRQ(ierr);
498     ierr = MatDenseRestoreArray(Xint_tmp,&xint_tmp);CHKERRQ(ierr);
499     ierr = VecDestroy(&x);CHKERRQ(ierr);
500     ierr = VecDestroy(&b);CHKERRQ(ierr);
501   }
502   ierr = MatDestroy(&Xint_tmp);CHKERRQ(ierr);
503 
504 #if defined(PETSC_USE_DEBUG_foo)
505   ierr = MatDenseGetArray(Xint,&xint);CHKERRQ(ierr);
506   for (i=0; i<Nint; i++) {
507     tmp = 0.0;
508     for (j=0; j<6; j++) tmp += xint[i+j*Nint];
509 
510     if (PetscAbsScalar(tmp-1.0) > 1.e-10) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Wrong Xint interpolation at i %D value %G",i,PetscAbsScalar(tmp));
511   }
512   ierr = MatDenseRestoreArray(Xint,&xint);CHKERRQ(ierr);
513   /* ierr =MatView(Xint,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr); */
514 #endif
515 
516 
517   /*         total faces    */
518   Ntotal =  mp*np*(pp+1) + mp*pp*(np+1) + np*pp*(mp+1);
519 
520   /*
521       For each vertex, edge, face on process (in the same orderings as used above) determine its local number including ghost points
522   */
523   cnt = 0;
524   { gl[cnt++] = mwidth+1;}
525   {
526     { gl[cnt++] = mwidth*nwidth+1;}
527     { gl[cnt++] = mwidth*nwidth + mwidth; /* these are the interior nodes */ gl[cnt++] = mwidth*nwidth + mwidth+m-istart-1;}
528     { gl[cnt++] = mwidth*nwidth+mwidth*(n-jstart-1)+1;}
529   }
530   { gl[cnt++] = mwidth*nwidth*(p-kstart-1) + mwidth+1;}
531 
532   /* PetscIntView(6,gl,PETSC_VIEWER_STDOUT_WORLD); */
533   /* convert that to global numbering and get them on all processes */
534   ierr = ISLocalToGlobalMappingApply(ltg,6,gl,gl);CHKERRQ(ierr);
535   /* PetscIntView(6,gl,PETSC_VIEWER_STDOUT_WORLD); */
536   ierr = PetscMalloc(6*mp*np*pp*sizeof(PetscInt),&globals);CHKERRQ(ierr);
537   ierr = MPI_Allgather(gl,6,MPIU_INT,globals,6,MPIU_INT,PetscObjectComm((PetscObject)da));CHKERRQ(ierr);
538 
539   /* Number the coarse grid points from 0 to Ntotal */
540   ierr = MatGetSize(Aglobal,&Nt,NULL);CHKERRQ(ierr);
541   ierr = PetscTableCreate(Ntotal/3,Nt+1,&ht);CHKERRQ(ierr);
542   for (i=0; i<6*mp*np*pp; i++) {
543     ierr = PetscTableAddCount(ht,globals[i]+1);CHKERRQ(ierr);
544   }
545   ierr = PetscTableGetCount(ht,&cnt);CHKERRQ(ierr);
546   if (cnt != Ntotal) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Hash table size %D not equal to total number coarse grid points %D",cnt,Ntotal);
547   ierr = PetscFree(globals);CHKERRQ(ierr);
548   for (i=0; i<6; i++) {
549     ierr = PetscTableFind(ht,gl[i]+1,&gl[i]);CHKERRQ(ierr);
550     gl[i]--;
551   }
552   ierr = PetscTableDestroy(&ht);CHKERRQ(ierr);
553   /* PetscIntView(6,gl,PETSC_VIEWER_STDOUT_WORLD); */
554 
555   /* construct global interpolation matrix */
556   ierr = MatGetLocalSize(Aglobal,&Ng,NULL);CHKERRQ(ierr);
557   if (reuse == MAT_INITIAL_MATRIX) {
558     ierr = MatCreateAIJ(PetscObjectComm((PetscObject)da),Ng,PETSC_DECIDE,PETSC_DECIDE,Ntotal,Nint+Nsurf,NULL,Nint,NULL,P);CHKERRQ(ierr);
559   } else {
560     ierr = MatZeroEntries(*P);CHKERRQ(ierr);
561   }
562   ierr = MatSetOption(*P,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr);
563   ierr = MatDenseGetArray(Xint,&xint);CHKERRQ(ierr);
564   ierr = MatSetValues(*P,Nint,IIint,6,gl,xint,INSERT_VALUES);CHKERRQ(ierr);
565   ierr = MatDenseRestoreArray(Xint,&xint);CHKERRQ(ierr);
566   ierr = MatDenseGetArray(Xsurf,&xsurf);CHKERRQ(ierr);
567   ierr = MatSetValues(*P,Nsurf,IIsurf,6,gl,xsurf,INSERT_VALUES);CHKERRQ(ierr);
568   ierr = MatDenseRestoreArray(Xsurf,&xsurf);CHKERRQ(ierr);
569   ierr = MatAssemblyBegin(*P,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
570   ierr = MatAssemblyEnd(*P,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
571   ierr = PetscFree2(IIint,IIsurf);CHKERRQ(ierr);
572 
573 
574 #if defined(PETSC_USE_DEBUG_foo)
575   {
576     Vec         x,y;
577     PetscScalar *yy;
578     ierr = VecCreateMPI(PetscObjectComm((PetscObject)da),Ng,PETSC_DETERMINE,&y);CHKERRQ(ierr);
579     ierr = VecCreateMPI(PetscObjectComm((PetscObject)da),PETSC_DETERMINE,Ntotal,&x);CHKERRQ(ierr);
580     ierr = VecSet(x,1.0);CHKERRQ(ierr);
581     ierr = MatMult(*P,x,y);CHKERRQ(ierr);
582     ierr = VecGetArray(y,&yy);CHKERRQ(ierr);
583     for (i=0; i<Ng; i++) {
584       if (PetscAbsScalar(yy[i]-1.0) > 1.e-10) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Wrong p interpolation at i %D value %G",i,PetscAbsScalar(yy[i]));
585     }
586     ierr = VecRestoreArray(y,&yy);CHKERRQ(ierr);
587     ierr = VecDestroy(x);CHKERRQ(ierr);
588     ierr = VecDestroy(y);CHKERRQ(ierr);
589   }
590 #endif
591 
592   ierr = MatDestroy(&Aii);CHKERRQ(ierr);
593   ierr = MatDestroy(&Ais);CHKERRQ(ierr);
594   ierr = MatDestroy(&Asi);CHKERRQ(ierr);
595   ierr = MatDestroy(&A);CHKERRQ(ierr);
596   ierr = ISDestroy(&is);CHKERRQ(ierr);
597   ierr = ISDestroy(&isint);CHKERRQ(ierr);
598   ierr = ISDestroy(&issurf);CHKERRQ(ierr);
599   ierr = MatDestroy(&Xint);CHKERRQ(ierr);
600   ierr = MatDestroy(&Xsurf);CHKERRQ(ierr);
601   PetscFunctionReturn(0);
602 }
603 
604 
#undef __FUNCT__
#define __FUNCT__ "PCExoticSetType"
/*@
   PCExoticSetType - Sets the type of coarse grid interpolation to use

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  type - either PC_EXOTIC_FACE or PC_EXOTIC_WIREBASKET (defaults to face)

   Notes: The face based interpolation has 1 degree of freedom per face and ignores the
     edge and vertex values completely in the coarse problem. For any seven point
     stencil the interpolation of a constant on all faces into the interior is that constant.

     The wirebasket interpolation has 1 degree of freedom per vertex, per edge and
     per face. A constant on the subdomain boundary is interpolated as that constant
     in the interior of the domain.

     The coarse grid matrix is obtained via the Galerkin computation A_c = R A R^T, hence
     if A is nonsingular A_c is also nonsingular.

     Both interpolations are suitable for only scalar problems.

     This is a no-op if pc is not of type PCEXOTIC (PetscTryMethod() silently
     skips PCs that do not provide the PCExoticSetType_C method).

   Level: intermediate


.seealso: PCEXOTIC, PCExoticType
@*/
PetscErrorCode  PCExoticSetType(PC pc,PCExoticType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveEnum(pc,type,2);
  /* dispatch to PCExoticSetType_Exotic() when the PC is of type PCEXOTIC */
  ierr = PetscTryMethod(pc,"PCExoticSetType_C",(PC,PCExoticType),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
644 
645 #undef __FUNCT__
646 #define __FUNCT__ "PCExoticSetType_Exotic"
647 PetscErrorCode  PCExoticSetType_Exotic(PC pc,PCExoticType type)
648 {
649   PC_MG     *mg  = (PC_MG*)pc->data;
650   PC_Exotic *ctx = (PC_Exotic*) mg->innerctx;
651 
652   PetscFunctionBegin;
653   ctx->type = type;
654   PetscFunctionReturn(0);
655 }
656 
#undef __FUNCT__
#define __FUNCT__ "PCSetUp_Exotic"
/*
   PCSetUp_Exotic - builds the face or wirebasket coarse-space interpolation from the
   PC's DMDA and operator, hands it to the underlying PCMG, then runs the MG setup.
*/
PetscErrorCode PCSetUp_Exotic(PC pc)
{
  PetscErrorCode ierr;
  Mat            A;
  PC_MG          *mg   = (PC_MG*)pc->data;
  PC_Exotic      *ex   = (PC_Exotic*) mg->innerctx;
  /* reuse the previously built interpolation matrix on repeated setups */
  MatReuse       reuse = (ex->P) ? MAT_REUSE_MATRIX : MAT_INITIAL_MATRIX;

  PetscFunctionBegin;
  if (!pc->dm) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Need to call PCSetDM() before using this PC");
  ierr = PCGetOperators(pc,NULL,&A,NULL);CHKERRQ(ierr);
  if (ex->type == PC_EXOTIC_FACE) {
    ierr = DMDAGetFaceInterpolation(pc->dm,ex,A,reuse,&ex->P);CHKERRQ(ierr);
  } else if (ex->type == PC_EXOTIC_WIREBASKET) {
    ierr = DMDAGetWireBasketInterpolation(pc->dm,ex,A,reuse,&ex->P);CHKERRQ(ierr);
  } else SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_PLIB,"Unknown exotic coarse space %d",ex->type);
  ierr = PCMGSetInterpolation(pc,1,ex->P);CHKERRQ(ierr);
  /* if PC has attached DM we must remove it or the PCMG will use it to compute incorrect sized vectors and interpolations */
  ierr = PCSetDM(pc,NULL);CHKERRQ(ierr);
  ierr = PCSetUp_MG(pc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
681 
682 #undef __FUNCT__
683 #define __FUNCT__ "PCDestroy_Exotic"
684 PetscErrorCode PCDestroy_Exotic(PC pc)
685 {
686   PetscErrorCode ierr;
687   PC_MG          *mg  = (PC_MG*)pc->data;
688   PC_Exotic      *ctx = (PC_Exotic*) mg->innerctx;
689 
690   PetscFunctionBegin;
691   ierr = MatDestroy(&ctx->P);CHKERRQ(ierr);
692   ierr = KSPDestroy(&ctx->ksp);CHKERRQ(ierr);
693   ierr = PetscFree(ctx);CHKERRQ(ierr);
694   ierr = PCDestroy_MG(pc);CHKERRQ(ierr);
695   PetscFunctionReturn(0);
696 }
697 
698 #undef __FUNCT__
699 #define __FUNCT__ "PCView_Exotic"
700 PetscErrorCode PCView_Exotic(PC pc,PetscViewer viewer)
701 {
702   PC_MG          *mg = (PC_MG*)pc->data;
703   PetscErrorCode ierr;
704   PetscBool      iascii;
705   PC_Exotic      *ctx = (PC_Exotic*) mg->innerctx;
706 
707   PetscFunctionBegin;
708   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
709   if (iascii) {
710     ierr = PetscViewerASCIIPrintf(viewer,"    Exotic type = %s\n",PCExoticTypes[ctx->type]);CHKERRQ(ierr);
711     if (ctx->directSolve) {
712       ierr = PetscViewerASCIIPrintf(viewer,"      Using direct solver to construct interpolation\n");CHKERRQ(ierr);
713     } else {
714       PetscViewer sviewer;
715       PetscMPIInt rank;
716 
717       ierr = PetscViewerASCIIPrintf(viewer,"      Using iterative solver to construct interpolation\n");CHKERRQ(ierr);
718       ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
719       ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);  /* should not need to push this twice? */
720       ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
721       ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
722       if (!rank) {
723         ierr = KSPView(ctx->ksp,sviewer);CHKERRQ(ierr);
724       }
725       ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
726       ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
727       ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
728     }
729   }
730   ierr = PCView_MG(pc,viewer);CHKERRQ(ierr);
731   PetscFunctionReturn(0);
732 }
733 
734 #undef __FUNCT__
735 #define __FUNCT__ "PCSetFromOptions_Exotic"
736 PetscErrorCode PCSetFromOptions_Exotic(PC pc)
737 {
738   PetscErrorCode ierr;
739   PetscBool      flg;
740   PC_MG          *mg = (PC_MG*)pc->data;
741   PCExoticType   mgctype;
742   PC_Exotic      *ctx = (PC_Exotic*) mg->innerctx;
743 
744   PetscFunctionBegin;
745   ierr = PetscOptionsHead("Exotic coarse space options");CHKERRQ(ierr);
746   ierr = PetscOptionsEnum("-pc_exotic_type","face or wirebasket","PCExoticSetType",PCExoticTypes,(PetscEnum)ctx->type,(PetscEnum*)&mgctype,&flg);CHKERRQ(ierr);
747   if (flg) {
748     ierr = PCExoticSetType(pc,mgctype);CHKERRQ(ierr);
749   }
750   ierr = PetscOptionsBool("-pc_exotic_direct_solver","use direct solver to construct interpolation","None",ctx->directSolve,&ctx->directSolve,NULL);CHKERRQ(ierr);
751   if (!ctx->directSolve) {
752     if (!ctx->ksp) {
753       const char *prefix;
754       ierr = KSPCreate(PETSC_COMM_SELF,&ctx->ksp);CHKERRQ(ierr);
755       ierr = PetscObjectIncrementTabLevel((PetscObject)ctx->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
756       ierr = PetscLogObjectParent(pc,ctx->ksp);CHKERRQ(ierr);
757       ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
758       ierr = KSPSetOptionsPrefix(ctx->ksp,prefix);CHKERRQ(ierr);
759       ierr = KSPAppendOptionsPrefix(ctx->ksp,"exotic_");CHKERRQ(ierr);
760     }
761     ierr = KSPSetFromOptions(ctx->ksp);CHKERRQ(ierr);
762   }
763   ierr = PetscOptionsTail();CHKERRQ(ierr);
764   PetscFunctionReturn(0);
765 }
766 
767 
768 /*MC
769      PCEXOTIC - Two level overlapping Schwarz preconditioner with exotic (non-standard) coarse grid spaces
770 
771      This uses the PCMG infrastructure restricted to two levels and the face and wirebasket based coarse
772    grid spaces.
773 
774    Notes: By default this uses GMRES on the fine grid smoother so this should be used with KSPFGMRES or the smoother changed to not use GMRES
775 
   References: These coarse grid spaces originate in the work of Bramble, Pasciak and Schatz, "The Construction
   of Preconditioners for Elliptic Problems by Substructuring IV", Mathematics of Computation, volume 53, pages 1--24, 1989.
778    They were generalized slightly in "Domain Decomposition Method for Linear Elasticity", Ph. D. thesis, Barry Smith,
779    New York University, 1990. They were then explored in great detail in Dryja, Smith, Widlund, "Schwarz Analysis
780    of Iterative Substructuring Methods for Elliptic Problems in Three Dimensions, SIAM Journal on Numerical
781    Analysis, volume 31. pages 1662-1694, 1994. These were developed in the context of iterative substructuring preconditioners.
782    They were then ingeniously applied as coarse grid spaces for overlapping Schwarz methods by Dohrmann and Widlund.
783    They refer to them as GDSW (generalized Dryja, Smith, Widlund preconditioners). See, for example,
784    Clark R. Dohrmann, Axel Klawonn, and Olof B. Widlund. Extending theory for domain decomposition algorithms to irregular subdomains. In Ulrich Langer, Marco
785    Discacciati, David Keyes, Olof Widlund, and Walter Zulehner, editors, Proceedings
786    of the 17th International Conference on Domain Decomposition Methods in
787    Science and Engineering, held in Strobl, Austria, July 3-7, 2006, number 60 in
788    Springer-Verlag, Lecture Notes in Computational Science and Engineering, pages 255-261, 2007.
789    Clark R. Dohrmann, Axel Klawonn, and Olof B. Widlund. A family of energy min-
790    imizing coarse spaces for overlapping Schwarz preconditioners. In Ulrich Langer,
791    Marco Discacciati, David Keyes, Olof Widlund, and Walter Zulehner, editors, Proceedings
792    of the 17th International Conference on Domain Decomposition Methods
793    in Science and Engineering, held in Strobl, Austria, July 3-7, 2006, number 60 in
794    Springer-Verlag, Lecture Notes in Computational Science and Engineering, pages 247-254, 2007
795    Clark R. Dohrmann, Axel Klawonn, and Olof B. Widlund. Domain decomposition
796    for less regular subdomains: Overlapping Schwarz in two dimensions. SIAM J.
797    Numer. Anal., 46(4):2153-2168, 2008.
798    Clark R. Dohrmann and Olof B. Widlund. An overlapping Schwarz
799    algorithm for almost incompressible elasticity. Technical Report
800    TR2008-912, Department of Computer Science, Courant Institute
801    of Mathematical Sciences, New York University, May 2008. URL:
802    http://cs.nyu.edu/csweb/Research/TechReports/TR2008-912/TR2008-912.pdf
803 
804    Options Database: The usual PCMG options are supported, such as -mg_levels_pc_type <type> -mg_coarse_pc_type <type>
805       -pc_mg_type <type>
806 
807    Level: advanced
808 
809 .seealso:  PCMG, PCSetDM(), PCExoticType, PCExoticSetType()
810 M*/
811 
812 EXTERN_C_BEGIN
813 #undef __FUNCT__
814 #define __FUNCT__ "PCCreate_Exotic"
815 PetscErrorCode  PCCreate_Exotic(PC pc)
816 {
817   PetscErrorCode ierr;
818   PC_Exotic      *ex;
819   PC_MG          *mg;
820 
821   PetscFunctionBegin;
822   /* if type was previously mg; must manually destroy it because call to PCSetType(pc,PCMG) will not destroy it */
823   if (pc->ops->destroy) {
824     ierr =  (*pc->ops->destroy)(pc);CHKERRQ(ierr);
825     pc->data = 0;
826   }
827   ierr = PetscFree(((PetscObject)pc)->type_name);CHKERRQ(ierr);
828   ((PetscObject)pc)->type_name = 0;
829 
830   ierr         = PCSetType(pc,PCMG);CHKERRQ(ierr);
831   ierr         = PCMGSetLevels(pc,2,NULL);CHKERRQ(ierr);
832   ierr         = PCMGSetGalerkin(pc,PETSC_TRUE);CHKERRQ(ierr);
833   ierr         = PetscNew(PC_Exotic,&ex);CHKERRQ(ierr); \
834   ex->type     = PC_EXOTIC_FACE;
835   mg           = (PC_MG*) pc->data;
836   mg->innerctx = ex;
837 
838 
839   pc->ops->setfromoptions = PCSetFromOptions_Exotic;
840   pc->ops->view           = PCView_Exotic;
841   pc->ops->destroy        = PCDestroy_Exotic;
842   pc->ops->setup          = PCSetUp_Exotic;
843 
844   ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCExoticSetType_C","PCExoticSetType_Exotic",PCExoticSetType_Exotic);CHKERRQ(ierr);
845   PetscFunctionReturn(0);
846 }
847 EXTERN_C_END
848