
#include <petscdmda.h>              /*I "petscdmda.h" I*/
#include <petsc/private/pcmgimpl.h> /*I "petscksp.h" I*/
#include <petscctable.h>

typedef struct {
  PCExoticType type;
  Mat          P;           /* the constructed interpolation matrix */
  PetscBool    directSolve; /* use direct LU factorization to construct interpolation */
  KSP          ksp;
} PC_Exotic;

const char *const PCExoticTypes[] = {"face", "wirebasket", "PCExoticType", "PC_Exotic", NULL};

/*
      DMDAGetWireBasketInterpolation - Gets the interpolation for a wirebasket based coarse space

*/
PetscErrorCode DMDAGetWireBasketInterpolation(PC pc, DM da, PC_Exotic *exotic, Mat Aglobal, MatReuse reuse, Mat *P)
{
  PetscInt               dim, i, j, k, m, n, p, dof, Nint, Nface, Nwire, Nsurf, *Iint, *Isurf, cint = 0, csurf = 0, istart, jstart, kstart, *II, N, c = 0;
  PetscInt               mwidth, nwidth, pwidth, cnt, mp, np, pp, Ntotal, gl[26], *globals, Ng, *IIint, *IIsurf, Nt;
  Mat                    Xint, Xsurf, Xint_tmp;
  IS                     isint, issurf, is, row, col;
  ISLocalToGlobalMapping ltg;
  MPI_Comm               comm;
  Mat                    A, Aii, Ais, Asi, *Aholder, iAii;
  MatFactorInfo          info;
  PetscScalar           *xsurf, *xint;
  const PetscScalar     *rxint;
#if defined(PETSC_USE_DEBUG_foo)
  PetscScalar tmp;
#endif
  PetscTable ht;

  PetscFunctionBegin;
  PetscCall(DMDAGetInfo(da, &dim, NULL, NULL, NULL, &mp, &np, &pp, &dof, NULL, NULL, NULL, NULL, NULL));
  PetscCheck(dof == 1, PetscObjectComm((PetscObject)da), PETSC_ERR_SUP, "Only for single field problems");
  PetscCheck(dim == 3, PetscObjectComm((PetscObject)da), PETSC_ERR_SUP, "Only coded for 3d problems");
  PetscCall(DMDAGetCorners(da, NULL, NULL, NULL, &m, &n, &p));
  PetscCall(DMDAGetGhostCorners(da, &istart, &jstart, &kstart, &mwidth, &nwidth, &pwidth));
  istart = istart ? -1 : 0;
  jstart = jstart ? -1 : 0;
  kstart = kstart ? -1 : 0;

  /*
    the columns of P are the interpolation of each coarse grid point (one for each vertex, edge and face)
    to all the local degrees of freedom (this includes the vertices, edges and faces).

    Xint are the subset of the interpolation into the interior

    Xface are the interpolation onto faces but not into the interior

    Xsurf are the interpolation onto the vertices and edges (the surfbasket)
                                      Xint
    Symbolically one could write P = (Xface) after interchanging the rows to match the natural ordering on the domain
                                      Xsurf
  */
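  /*
     Illustrative count (added example, not from the original source): on an interior subdomain with
     m = n = p = 4 and istart = jstart = kstart = -1 the local patch is 5 x 5 x 5, so N = 125, Nint = 27,
     Nface = 2*(9 + 9 + 9) = 54, Nwire = 4*(3 + 3 + 3) + 8 = 44, and Nsurf = Nface + Nwire = 98,
     giving Nint + Nsurf = N as required.
  */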
  N     = (m - istart) * (n - jstart) * (p - kstart);
  Nint  = (m - 2 - istart) * (n - 2 - jstart) * (p - 2 - kstart);
  Nface = 2 * ((m - 2 - istart) * (n - 2 - jstart) + (m - 2 - istart) * (p - 2 - kstart) + (n - 2 - jstart) * (p - 2 - kstart));
  Nwire = 4 * ((m - 2 - istart) + (n - 2 - jstart) + (p - 2 - kstart)) + 8;
  Nsurf = Nface + Nwire;
  PetscCall(MatCreateSeqDense(MPI_COMM_SELF, Nint, 26, NULL, &Xint));
  PetscCall(MatCreateSeqDense(MPI_COMM_SELF, Nsurf, 26, NULL, &Xsurf));
  PetscCall(MatDenseGetArray(Xsurf, &xsurf));

  /*
     Require that all 12 edges and 6 faces have at least one grid point. Otherwise some of the columns of
     Xsurf will be all zero (thus making the coarse matrix singular).
  */
  PetscCheck(m - istart >= 3, PETSC_COMM_SELF, PETSC_ERR_SUP, "Number of grid points per process in X direction must be at least 3");
  PetscCheck(n - jstart >= 3, PETSC_COMM_SELF, PETSC_ERR_SUP, "Number of grid points per process in Y direction must be at least 3");
  PetscCheck(p - kstart >= 3, PETSC_COMM_SELF, PETSC_ERR_SUP, "Number of grid points per process in Z direction must be at least 3");

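  /* The 26 columns of Xsurf correspond to the 26 coarse grid points associated with this subdomain:
     its 8 vertices, 12 edges and 6 faces. The sweep below visits the boundary grid points in
     lexicographic order and places a 1 in the column of the vertex, edge or face each point lies on. */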
  cnt = 0;

  xsurf[cnt++] = 1;
  for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + Nsurf] = 1;
  xsurf[cnt++ + 2 * Nsurf] = 1;

  for (j = 1; j < n - 1 - jstart; j++) {
    xsurf[cnt++ + 3 * Nsurf] = 1;
    for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + 4 * Nsurf] = 1;
    xsurf[cnt++ + 5 * Nsurf] = 1;
  }

  xsurf[cnt++ + 6 * Nsurf] = 1;
  for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + 7 * Nsurf] = 1;
  xsurf[cnt++ + 8 * Nsurf] = 1;

  for (k = 1; k < p - 1 - kstart; k++) {
    xsurf[cnt++ + 9 * Nsurf] = 1;
    for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + 10 * Nsurf] = 1;
    xsurf[cnt++ + 11 * Nsurf] = 1;

    for (j = 1; j < n - 1 - jstart; j++) {
      xsurf[cnt++ + 12 * Nsurf] = 1;
      /* these are the interior nodes */
      xsurf[cnt++ + 13 * Nsurf] = 1;
    }

    xsurf[cnt++ + 14 * Nsurf] = 1;
    for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + 15 * Nsurf] = 1;
    xsurf[cnt++ + 16 * Nsurf] = 1;
  }

  xsurf[cnt++ + 17 * Nsurf] = 1;
  for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + 18 * Nsurf] = 1;
  xsurf[cnt++ + 19 * Nsurf] = 1;

  for (j = 1; j < n - 1 - jstart; j++) {
    xsurf[cnt++ + 20 * Nsurf] = 1;
    for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + 21 * Nsurf] = 1;
    xsurf[cnt++ + 22 * Nsurf] = 1;
  }

  xsurf[cnt++ + 23 * Nsurf] = 1;
  for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + 24 * Nsurf] = 1;
  xsurf[cnt++ + 25 * Nsurf] = 1;

  /* interpolations only sum to 1 when using direct solver */
#if defined(PETSC_USE_DEBUG_foo)
  for (i = 0; i < Nsurf; i++) {
    tmp = 0.0;
    for (j = 0; j < 26; j++) tmp += xsurf[i + j * Nsurf];
    PetscCheck(PetscAbsScalar(tmp - 1.0) <= 1.e-10, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Wrong Xsurf interpolation at i %" PetscInt_FMT " value %g", i, (double)PetscAbsScalar(tmp));
  }
#endif
  PetscCall(MatDenseRestoreArray(Xsurf, &xsurf));
  /* PetscCall(MatView(Xsurf,PETSC_VIEWER_STDOUT_WORLD));*/

  /*
       I are the indices for all the needed vertices (in global numbering)
       Iint are the indices for the interior values, Isurf for the surface values
            (This is just for the part of the global matrix obtained with MatCreateSubMatrix(), it
             is NOT the local DMDA ordering.)
       IIint and IIsurf are the same as the Iint, Isurf except they are in the global numbering
  */
#define Endpoint(a, start, b) (a == 0 || a == (b - 1 - start))
  PetscCall(PetscMalloc3(N, &II, Nint, &Iint, Nsurf, &Isurf));
  PetscCall(PetscMalloc2(Nint, &IIint, Nsurf, &IIsurf));
  for (k = 0; k < p - kstart; k++) {
    for (j = 0; j < n - jstart; j++) {
      for (i = 0; i < m - istart; i++) {
        II[c++] = i + j * mwidth + k * mwidth * nwidth;

        if (!Endpoint(i, istart, m) && !Endpoint(j, jstart, n) && !Endpoint(k, kstart, p)) {
          IIint[cint]  = i + j * mwidth + k * mwidth * nwidth;
          Iint[cint++] = i + j * (m - istart) + k * (m - istart) * (n - jstart);
        } else {
          IIsurf[csurf]  = i + j * mwidth + k * mwidth * nwidth;
          Isurf[csurf++] = i + j * (m - istart) + k * (m - istart) * (n - jstart);
        }
      }
    }
  }
  PetscCheck(c == N, PETSC_COMM_SELF, PETSC_ERR_PLIB, "c != N");
  PetscCheck(cint == Nint, PETSC_COMM_SELF, PETSC_ERR_PLIB, "cint != Nint");
  PetscCheck(csurf == Nsurf, PETSC_COMM_SELF, PETSC_ERR_PLIB, "csurf != Nsurf");
  PetscCall(DMGetLocalToGlobalMapping(da, &ltg));
  PetscCall(ISLocalToGlobalMappingApply(ltg, N, II, II));
  PetscCall(ISLocalToGlobalMappingApply(ltg, Nint, IIint, IIint));
  PetscCall(ISLocalToGlobalMappingApply(ltg, Nsurf, IIsurf, IIsurf));
  PetscCall(PetscObjectGetComm((PetscObject)da, &comm));
  PetscCall(ISCreateGeneral(comm, N, II, PETSC_COPY_VALUES, &is));
  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, Nint, Iint, PETSC_COPY_VALUES, &isint));
  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, Nsurf, Isurf, PETSC_COPY_VALUES, &issurf));
  PetscCall(PetscFree3(II, Iint, Isurf));

  PetscCall(MatCreateSubMatrices(Aglobal, 1, &is, &is, MAT_INITIAL_MATRIX, &Aholder));
  A = *Aholder;
  PetscCall(PetscFree(Aholder));

  PetscCall(MatCreateSubMatrix(A, isint, isint, MAT_INITIAL_MATRIX, &Aii));
  PetscCall(MatCreateSubMatrix(A, isint, issurf, MAT_INITIAL_MATRIX, &Ais));
  PetscCall(MatCreateSubMatrix(A, issurf, isint, MAT_INITIAL_MATRIX, &Asi));

  /*
     Solve for the interpolation onto the interior Xint
  */
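  /* Each column of Xsurf is extended into the subdomain interior by solving with the interior block,
     i.e. Xint = -inv(Aii) * (Ais * Xsurf), computed either with a direct LU factorization or with a KSP */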
  PetscCall(MatMatMult(Ais, Xsurf, MAT_INITIAL_MATRIX, PETSC_DETERMINE, &Xint_tmp));
  PetscCall(MatScale(Xint_tmp, -1.0));
  if (exotic->directSolve) {
    PetscCall(MatGetFactor(Aii, MATSOLVERPETSC, MAT_FACTOR_LU, &iAii));
    PetscCall(MatFactorInfoInitialize(&info));
    PetscCall(MatGetOrdering(Aii, MATORDERINGND, &row, &col));
    PetscCall(MatLUFactorSymbolic(iAii, Aii, row, col, &info));
    PetscCall(ISDestroy(&row));
    PetscCall(ISDestroy(&col));
    PetscCall(MatLUFactorNumeric(iAii, Aii, &info));
    PetscCall(MatMatSolve(iAii, Xint_tmp, Xint));
    PetscCall(MatDestroy(&iAii));
  } else {
    Vec          b, x;
    PetscScalar *xint_tmp;

    PetscCall(MatDenseGetArray(Xint, &xint));
    PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, Nint, NULL, &x));
    PetscCall(MatDenseGetArray(Xint_tmp, &xint_tmp));
    PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, Nint, NULL, &b));
    PetscCall(KSPSetOperators(exotic->ksp, Aii, Aii));
    for (i = 0; i < 26; i++) {
      PetscCall(VecPlaceArray(x, xint + i * Nint));
      PetscCall(VecPlaceArray(b, xint_tmp + i * Nint));
      PetscCall(KSPSolve(exotic->ksp, b, x));
      PetscCall(KSPCheckSolve(exotic->ksp, pc, x));
      PetscCall(VecResetArray(x));
      PetscCall(VecResetArray(b));
    }
    PetscCall(MatDenseRestoreArray(Xint, &xint));
    PetscCall(MatDenseRestoreArray(Xint_tmp, &xint_tmp));
    PetscCall(VecDestroy(&x));
    PetscCall(VecDestroy(&b));
  }
  PetscCall(MatDestroy(&Xint_tmp));

#if defined(PETSC_USE_DEBUG_foo)
  PetscCall(MatDenseGetArrayRead(Xint, &rxint));
  for (i = 0; i < Nint; i++) {
    tmp = 0.0;
    for (j = 0; j < 26; j++) tmp += rxint[i + j * Nint];

    PetscCheck(PetscAbsScalar(tmp - 1.0) <= 1.e-10, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Wrong Xint interpolation at i %" PetscInt_FMT " value %g", i, (double)PetscAbsScalar(tmp));
  }
  PetscCall(MatDenseRestoreArrayRead(Xint, &rxint));
  /* PetscCall(MatView(Xint,PETSC_VIEWER_STDOUT_WORLD)); */
#endif

  /*         total vertices             total faces                                  total edges */
  Ntotal = (mp + 1) * (np + 1) * (pp + 1) + mp * np * (pp + 1) + mp * pp * (np + 1) + np * pp * (mp + 1) + mp * (np + 1) * (pp + 1) + np * (mp + 1) * (pp + 1) + pp * (mp + 1) * (np + 1);
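  /* mp, np, pp are the number of subdomains (processes) in each direction, so Ntotal counts one coarse
     degree of freedom for every subdomain vertex, face and edge of the mp by np by pp decomposition */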

  /*
      For each vertex, edge, face on process (in the same orderings as used above) determine its local number including ghost points
  */
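  /* gl[] holds, for each of the 26 coarse points in the same order as the columns of Xsurf, the index
     (in the local ghosted numbering) of a representative grid point on that vertex, edge or face */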
  cnt = 0;

  gl[cnt++] = 0;
  {
    gl[cnt++] = 1;
  }
  gl[cnt++] = m - istart - 1;
  {
    gl[cnt++] = mwidth;
    {
      gl[cnt++] = mwidth + 1;
    }
    gl[cnt++] = mwidth + m - istart - 1;
  }
  gl[cnt++] = mwidth * (n - jstart - 1);
  {
    gl[cnt++] = mwidth * (n - jstart - 1) + 1;
  }
  gl[cnt++] = mwidth * (n - jstart - 1) + m - istart - 1;
  {
    gl[cnt++] = mwidth * nwidth;
    {
      gl[cnt++] = mwidth * nwidth + 1;
    }
    gl[cnt++] = mwidth * nwidth + m - istart - 1;
    {
      gl[cnt++] = mwidth * nwidth + mwidth; /* these are the interior nodes */
      gl[cnt++] = mwidth * nwidth + mwidth + m - istart - 1;
    }
    gl[cnt++] = mwidth * nwidth + mwidth * (n - jstart - 1);
    {
      gl[cnt++] = mwidth * nwidth + mwidth * (n - jstart - 1) + 1;
    }
    gl[cnt++] = mwidth * nwidth + mwidth * (n - jstart - 1) + m - istart - 1;
  }
  gl[cnt++] = mwidth * nwidth * (p - kstart - 1);
  {
    gl[cnt++] = mwidth * nwidth * (p - kstart - 1) + 1;
  }
  gl[cnt++] = mwidth * nwidth * (p - kstart - 1) + m - istart - 1;
  {
    gl[cnt++] = mwidth * nwidth * (p - kstart - 1) + mwidth;
    {
      gl[cnt++] = mwidth * nwidth * (p - kstart - 1) + mwidth + 1;
    }
    gl[cnt++] = mwidth * nwidth * (p - kstart - 1) + mwidth + m - istart - 1;
  }
  gl[cnt++] = mwidth * nwidth * (p - kstart - 1) + mwidth * (n - jstart - 1);
  {
    gl[cnt++] = mwidth * nwidth * (p - kstart - 1) + mwidth * (n - jstart - 1) + 1;
  }
  gl[cnt++] = mwidth * nwidth * (p - kstart - 1) + mwidth * (n - jstart - 1) + m - istart - 1;

  /* PetscIntView(26,gl,PETSC_VIEWER_STDOUT_WORLD); */
  /* convert that to global numbering and get them on all processes */
  PetscCall(ISLocalToGlobalMappingApply(ltg, 26, gl, gl));
  /* PetscIntView(26,gl,PETSC_VIEWER_STDOUT_WORLD); */
  PetscCall(PetscMalloc1(26 * mp * np * pp, &globals));
  PetscCallMPI(MPI_Allgather(gl, 26, MPIU_INT, globals, 26, MPIU_INT, PetscObjectComm((PetscObject)da)));

  /* Number the coarse grid points from 0 to Ntotal */
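  /* PetscTable cannot store a key or value of 0, so indices are shifted up by one when inserted and
     looked up, and shifted back down with the gl[i]-- below */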
  PetscCall(MatGetSize(Aglobal, &Nt, NULL));
  PetscCall(PetscTableCreate(Ntotal / 3, Nt + 1, &ht));
  for (i = 0; i < 26 * mp * np * pp; i++) PetscCall(PetscTableAddCount(ht, globals[i] + 1));
  PetscCall(PetscTableGetCount(ht, &cnt));
  PetscCheck(cnt == Ntotal, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Hash table size %" PetscInt_FMT " not equal to total number coarse grid points %" PetscInt_FMT, cnt, Ntotal);
  PetscCall(PetscFree(globals));
  for (i = 0; i < 26; i++) {
    PetscCall(PetscTableFind(ht, gl[i] + 1, &gl[i]));
    gl[i]--;
  }
  PetscCall(PetscTableDestroy(&ht));
  /* PetscIntView(26,gl,PETSC_VIEWER_STDOUT_WORLD); */

  /* construct global interpolation matrix */
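  /* The rows of P are the locally owned fine grid points (in the global numbering IIint/IIsurf) and the
     columns are the 26 coarse points in gl[]; MAT_ROW_ORIENTED is set to PETSC_FALSE because the dense
     arrays of Xint and Xsurf are stored by column */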
  PetscCall(MatGetLocalSize(Aglobal, &Ng, NULL));
  if (reuse == MAT_INITIAL_MATRIX) {
    PetscCall(MatCreateAIJ(PetscObjectComm((PetscObject)da), Ng, PETSC_DECIDE, PETSC_DECIDE, Ntotal, Nint + Nsurf, NULL, Nint + Nsurf, NULL, P));
  } else {
    PetscCall(MatZeroEntries(*P));
  }
  PetscCall(MatSetOption(*P, MAT_ROW_ORIENTED, PETSC_FALSE));
  PetscCall(MatDenseGetArrayRead(Xint, &rxint));
  PetscCall(MatSetValues(*P, Nint, IIint, 26, gl, rxint, INSERT_VALUES));
  PetscCall(MatDenseRestoreArrayRead(Xint, &rxint));
  PetscCall(MatDenseGetArrayRead(Xsurf, &rxint));
  PetscCall(MatSetValues(*P, Nsurf, IIsurf, 26, gl, rxint, INSERT_VALUES));
  PetscCall(MatDenseRestoreArrayRead(Xsurf, &rxint));
  PetscCall(MatAssemblyBegin(*P, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(*P, MAT_FINAL_ASSEMBLY));
  PetscCall(PetscFree2(IIint, IIsurf));

#if defined(PETSC_USE_DEBUG_foo)
  {
    Vec          x, y;
    PetscScalar *yy;
    PetscCall(VecCreateMPI(PetscObjectComm((PetscObject)da), Ng, PETSC_DETERMINE, &y));
    PetscCall(VecCreateMPI(PetscObjectComm((PetscObject)da), PETSC_DETERMINE, Ntotal, &x));
    PetscCall(VecSet(x, 1.0));
    PetscCall(MatMult(*P, x, y));
    PetscCall(VecGetArray(y, &yy));
    for (i = 0; i < Ng; i++) PetscCheck(PetscAbsScalar(yy[i] - 1.0) <= 1.e-10, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Wrong p interpolation at i %" PetscInt_FMT " value %g", i, (double)PetscAbsScalar(yy[i]));
    PetscCall(VecRestoreArray(y, &yy));
    PetscCall(VecDestroy(&x));
    PetscCall(VecDestroy(&y));
  }
#endif

  PetscCall(MatDestroy(&Aii));
  PetscCall(MatDestroy(&Ais));
  PetscCall(MatDestroy(&Asi));
  PetscCall(MatDestroy(&A));
  PetscCall(ISDestroy(&is));
  PetscCall(ISDestroy(&isint));
  PetscCall(ISDestroy(&issurf));
  PetscCall(MatDestroy(&Xint));
  PetscCall(MatDestroy(&Xsurf));
  PetscFunctionReturn(0);
}

/*
      DMDAGetFaceInterpolation - Gets the interpolation for a face based coarse space

*/
PetscErrorCode DMDAGetFaceInterpolation(PC pc, DM da, PC_Exotic *exotic, Mat Aglobal, MatReuse reuse, Mat *P)
{
  PetscInt               dim, i, j, k, m, n, p, dof, Nint, Nface, Nwire, Nsurf, *Iint, *Isurf, cint = 0, csurf = 0, istart, jstart, kstart, *II, N, c = 0;
  PetscInt               mwidth, nwidth, pwidth, cnt, mp, np, pp, Ntotal, gl[6], *globals, Ng, *IIint, *IIsurf, Nt;
  Mat                    Xint, Xsurf, Xint_tmp;
  IS                     isint, issurf, is, row, col;
  ISLocalToGlobalMapping ltg;
  MPI_Comm               comm;
  Mat                    A, Aii, Ais, Asi, *Aholder, iAii;
  MatFactorInfo          info;
  PetscScalar           *xsurf, *xint;
  const PetscScalar     *rxint;
#if defined(PETSC_USE_DEBUG_foo)
  PetscScalar tmp;
#endif
  PetscTable ht;

  PetscFunctionBegin;
  PetscCall(DMDAGetInfo(da, &dim, NULL, NULL, NULL, &mp, &np, &pp, &dof, NULL, NULL, NULL, NULL, NULL));
  PetscCheck(dof == 1, PetscObjectComm((PetscObject)da), PETSC_ERR_SUP, "Only for single field problems");
  PetscCheck(dim == 3, PetscObjectComm((PetscObject)da), PETSC_ERR_SUP, "Only coded for 3d problems");
  PetscCall(DMDAGetCorners(da, NULL, NULL, NULL, &m, &n, &p));
  PetscCall(DMDAGetGhostCorners(da, &istart, &jstart, &kstart, &mwidth, &nwidth, &pwidth));
  istart = istart ? -1 : 0;
  jstart = jstart ? -1 : 0;
  kstart = kstart ? -1 : 0;

  /*
    the columns of P are the interpolation of each coarse grid point (one for each face)
    to all the local degrees of freedom (this includes the vertices, edges and faces).

    Xint are the subset of the interpolation into the interior

    Xface are the interpolation onto faces but not into the interior

    Xsurf are the interpolation onto the vertices and edges (the surfbasket)
                                      Xint
    Symbolically one could write P = (Xface) after interchanging the rows to match the natural ordering on the domain
                                      Xsurf
  */
  N     = (m - istart) * (n - jstart) * (p - kstart);
  Nint  = (m - 2 - istart) * (n - 2 - jstart) * (p - 2 - kstart);
  Nface = 2 * ((m - 2 - istart) * (n - 2 - jstart) + (m - 2 - istart) * (p - 2 - kstart) + (n - 2 - jstart) * (p - 2 - kstart));
  Nwire = 4 * ((m - 2 - istart) + (n - 2 - jstart) + (p - 2 - kstart)) + 8;
  Nsurf = Nface + Nwire;
  PetscCall(MatCreateSeqDense(MPI_COMM_SELF, Nint, 6, NULL, &Xint));
  PetscCall(MatCreateSeqDense(MPI_COMM_SELF, Nsurf, 6, NULL, &Xsurf));
  PetscCall(MatDenseGetArray(Xsurf, &xsurf));

  /*
     Require that all 12 edges and 6 faces have at least one grid point. Otherwise some of the columns of
     Xsurf will be all zero (thus making the coarse matrix singular).
  */
  PetscCheck(m - istart >= 3, PETSC_COMM_SELF, PETSC_ERR_SUP, "Number of grid points per process in X direction must be at least 3");
  PetscCheck(n - jstart >= 3, PETSC_COMM_SELF, PETSC_ERR_SUP, "Number of grid points per process in Y direction must be at least 3");
  PetscCheck(p - kstart >= 3, PETSC_COMM_SELF, PETSC_ERR_SUP, "Number of grid points per process in Z direction must be at least 3");

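  /* The 6 columns of Xsurf correspond to the 6 coarse grid points for this subdomain, one per face */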
  cnt = 0;
  for (j = 1; j < n - 1 - jstart; j++) {
    for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + 0 * Nsurf] = 1;
  }

  for (k = 1; k < p - 1 - kstart; k++) {
    for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + 1 * Nsurf] = 1;
    for (j = 1; j < n - 1 - jstart; j++) {
      xsurf[cnt++ + 2 * Nsurf] = 1;
      /* these are the interior nodes */
      xsurf[cnt++ + 3 * Nsurf] = 1;
    }
    for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + 4 * Nsurf] = 1;
  }
  for (j = 1; j < n - 1 - jstart; j++) {
    for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + 5 * Nsurf] = 1;
  }

#if defined(PETSC_USE_DEBUG_foo)
  for (i = 0; i < Nsurf; i++) {
    tmp = 0.0;
    for (j = 0; j < 6; j++) tmp += xsurf[i + j * Nsurf];

    PetscCheck(PetscAbsScalar(tmp - 1.0) <= 1.e-10, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Wrong Xsurf interpolation at i %" PetscInt_FMT " value %g", i, (double)PetscAbsScalar(tmp));
  }
#endif
  PetscCall(MatDenseRestoreArray(Xsurf, &xsurf));
  /* PetscCall(MatView(Xsurf,PETSC_VIEWER_STDOUT_WORLD));*/

  /*
       I are the indices for all the needed vertices (in global numbering)
       Iint are the indices for the interior values, Isurf for the surface values
            (This is just for the part of the global matrix obtained with MatCreateSubMatrix(), it
             is NOT the local DMDA ordering.)
       IIint and IIsurf are the same as the Iint, Isurf except they are in the global numbering
  */
#define Endpoint(a, start, b) (a == 0 || a == (b - 1 - start))
  PetscCall(PetscMalloc3(N, &II, Nint, &Iint, Nsurf, &Isurf));
  PetscCall(PetscMalloc2(Nint, &IIint, Nsurf, &IIsurf));
  for (k = 0; k < p - kstart; k++) {
    for (j = 0; j < n - jstart; j++) {
      for (i = 0; i < m - istart; i++) {
        II[c++] = i + j * mwidth + k * mwidth * nwidth;

        if (!Endpoint(i, istart, m) && !Endpoint(j, jstart, n) && !Endpoint(k, kstart, p)) {
          IIint[cint]  = i + j * mwidth + k * mwidth * nwidth;
          Iint[cint++] = i + j * (m - istart) + k * (m - istart) * (n - jstart);
        } else {
          IIsurf[csurf]  = i + j * mwidth + k * mwidth * nwidth;
          Isurf[csurf++] = i + j * (m - istart) + k * (m - istart) * (n - jstart);
        }
      }
    }
  }
  PetscCheck(c == N, PETSC_COMM_SELF, PETSC_ERR_PLIB, "c != N");
  PetscCheck(cint == Nint, PETSC_COMM_SELF, PETSC_ERR_PLIB, "cint != Nint");
  PetscCheck(csurf == Nsurf, PETSC_COMM_SELF, PETSC_ERR_PLIB, "csurf != Nsurf");
  PetscCall(DMGetLocalToGlobalMapping(da, &ltg));
  PetscCall(ISLocalToGlobalMappingApply(ltg, N, II, II));
  PetscCall(ISLocalToGlobalMappingApply(ltg, Nint, IIint, IIint));
  PetscCall(ISLocalToGlobalMappingApply(ltg, Nsurf, IIsurf, IIsurf));
  PetscCall(PetscObjectGetComm((PetscObject)da, &comm));
  PetscCall(ISCreateGeneral(comm, N, II, PETSC_COPY_VALUES, &is));
  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, Nint, Iint, PETSC_COPY_VALUES, &isint));
  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, Nsurf, Isurf, PETSC_COPY_VALUES, &issurf));
  PetscCall(PetscFree3(II, Iint, Isurf));

  PetscCall(ISSort(is));
  PetscCall(MatCreateSubMatrices(Aglobal, 1, &is, &is, MAT_INITIAL_MATRIX, &Aholder));
  A = *Aholder;
  PetscCall(PetscFree(Aholder));

  PetscCall(MatCreateSubMatrix(A, isint, isint, MAT_INITIAL_MATRIX, &Aii));
  PetscCall(MatCreateSubMatrix(A, isint, issurf, MAT_INITIAL_MATRIX, &Ais));
  PetscCall(MatCreateSubMatrix(A, issurf, isint, MAT_INITIAL_MATRIX, &Asi));

  /*
     Solve for the interpolation onto the interior Xint
  */
  PetscCall(MatMatMult(Ais, Xsurf, MAT_INITIAL_MATRIX, PETSC_DETERMINE, &Xint_tmp));
  PetscCall(MatScale(Xint_tmp, -1.0));

  if (exotic->directSolve) {
    PetscCall(MatGetFactor(Aii, MATSOLVERPETSC, MAT_FACTOR_LU, &iAii));
    PetscCall(MatFactorInfoInitialize(&info));
    PetscCall(MatGetOrdering(Aii, MATORDERINGND, &row, &col));
    PetscCall(MatLUFactorSymbolic(iAii, Aii, row, col, &info));
    PetscCall(ISDestroy(&row));
    PetscCall(ISDestroy(&col));
    PetscCall(MatLUFactorNumeric(iAii, Aii, &info));
    PetscCall(MatMatSolve(iAii, Xint_tmp, Xint));
    PetscCall(MatDestroy(&iAii));
  } else {
    Vec          b, x;
    PetscScalar *xint_tmp;

    PetscCall(MatDenseGetArray(Xint, &xint));
    PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, Nint, NULL, &x));
    PetscCall(MatDenseGetArray(Xint_tmp, &xint_tmp));
    PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, Nint, NULL, &b));
    PetscCall(KSPSetOperators(exotic->ksp, Aii, Aii));
    for (i = 0; i < 6; i++) {
      PetscCall(VecPlaceArray(x, xint + i * Nint));
      PetscCall(VecPlaceArray(b, xint_tmp + i * Nint));
      PetscCall(KSPSolve(exotic->ksp, b, x));
      PetscCall(KSPCheckSolve(exotic->ksp, pc, x));
      PetscCall(VecResetArray(x));
      PetscCall(VecResetArray(b));
    }
    PetscCall(MatDenseRestoreArray(Xint, &xint));
    PetscCall(MatDenseRestoreArray(Xint_tmp, &xint_tmp));
    PetscCall(VecDestroy(&x));
    PetscCall(VecDestroy(&b));
  }
  PetscCall(MatDestroy(&Xint_tmp));

#if defined(PETSC_USE_DEBUG_foo)
  PetscCall(MatDenseGetArrayRead(Xint, &rxint));
  for (i = 0; i < Nint; i++) {
    tmp = 0.0;
    for (j = 0; j < 6; j++) tmp += rxint[i + j * Nint];

    PetscCheck(PetscAbsScalar(tmp - 1.0) <= 1.e-10, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Wrong Xint interpolation at i %" PetscInt_FMT " value %g", i, (double)PetscAbsScalar(tmp));
  }
  PetscCall(MatDenseRestoreArrayRead(Xint, &rxint));
  /* PetscCall(MatView(Xint,PETSC_VIEWER_STDOUT_WORLD)); */
#endif

  /*         total faces    */
  Ntotal = mp * np * (pp + 1) + mp * pp * (np + 1) + np * pp * (mp + 1);

  /*
      For each vertex, edge, face on process (in the same orderings as used above) determine its local number including ghost points
  */
  cnt = 0;
  {
    gl[cnt++] = mwidth + 1;
  }
  {
    {
      gl[cnt++] = mwidth * nwidth + 1;
    }
    {
      gl[cnt++] = mwidth * nwidth + mwidth; /* these are the interior nodes */
      gl[cnt++] = mwidth * nwidth + mwidth + m - istart - 1;
    }
    {
      gl[cnt++] = mwidth * nwidth + mwidth * (n - jstart - 1) + 1;
    }
  }
  {
    gl[cnt++] = mwidth * nwidth * (p - kstart - 1) + mwidth + 1;
  }

  /* PetscIntView(6,gl,PETSC_VIEWER_STDOUT_WORLD); */
  /* convert that to global numbering and get them on all processes */
  PetscCall(ISLocalToGlobalMappingApply(ltg, 6, gl, gl));
  /* PetscIntView(6,gl,PETSC_VIEWER_STDOUT_WORLD); */
  PetscCall(PetscMalloc1(6 * mp * np * pp, &globals));
  PetscCallMPI(MPI_Allgather(gl, 6, MPIU_INT, globals, 6, MPIU_INT, PetscObjectComm((PetscObject)da)));

  /* Number the coarse grid points from 0 to Ntotal */
  PetscCall(MatGetSize(Aglobal, &Nt, NULL));
  PetscCall(PetscTableCreate(Ntotal / 3, Nt + 1, &ht));
  for (i = 0; i < 6 * mp * np * pp; i++) PetscCall(PetscTableAddCount(ht, globals[i] + 1));
  PetscCall(PetscTableGetCount(ht, &cnt));
  PetscCheck(cnt == Ntotal, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Hash table size %" PetscInt_FMT " not equal to total number coarse grid points %" PetscInt_FMT, cnt, Ntotal);
  PetscCall(PetscFree(globals));
  for (i = 0; i < 6; i++) {
    PetscCall(PetscTableFind(ht, gl[i] + 1, &gl[i]));
    gl[i]--;
  }
  PetscCall(PetscTableDestroy(&ht));
  /* PetscIntView(6,gl,PETSC_VIEWER_STDOUT_WORLD); */

  /* construct global interpolation matrix */
  PetscCall(MatGetLocalSize(Aglobal, &Ng, NULL));
  if (reuse == MAT_INITIAL_MATRIX) {
    PetscCall(MatCreateAIJ(PetscObjectComm((PetscObject)da), Ng, PETSC_DECIDE, PETSC_DECIDE, Ntotal, Nint + Nsurf, NULL, Nint, NULL, P));
  } else {
    PetscCall(MatZeroEntries(*P));
  }
  PetscCall(MatSetOption(*P, MAT_ROW_ORIENTED, PETSC_FALSE));
  PetscCall(MatDenseGetArrayRead(Xint, &rxint));
  PetscCall(MatSetValues(*P, Nint, IIint, 6, gl, rxint, INSERT_VALUES));
  PetscCall(MatDenseRestoreArrayRead(Xint, &rxint));
  PetscCall(MatDenseGetArrayRead(Xsurf, &rxint));
  PetscCall(MatSetValues(*P, Nsurf, IIsurf, 6, gl, rxint, INSERT_VALUES));
  PetscCall(MatDenseRestoreArrayRead(Xsurf, &rxint));
  PetscCall(MatAssemblyBegin(*P, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(*P, MAT_FINAL_ASSEMBLY));
  PetscCall(PetscFree2(IIint, IIsurf));

#if defined(PETSC_USE_DEBUG_foo)
  {
    Vec          x, y;
    PetscScalar *yy;
    PetscCall(VecCreateMPI(PetscObjectComm((PetscObject)da), Ng, PETSC_DETERMINE, &y));
    PetscCall(VecCreateMPI(PetscObjectComm((PetscObject)da), PETSC_DETERMINE, Ntotal, &x));
    PetscCall(VecSet(x, 1.0));
    PetscCall(MatMult(*P, x, y));
    PetscCall(VecGetArray(y, &yy));
    for (i = 0; i < Ng; i++) PetscCheck(PetscAbsScalar(yy[i] - 1.0) <= 1.e-10, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Wrong p interpolation at i %" PetscInt_FMT " value %g", i, (double)PetscAbsScalar(yy[i]));
    PetscCall(VecRestoreArray(y, &yy));
    PetscCall(VecDestroy(&x));
    PetscCall(VecDestroy(&y));
  }
#endif

  PetscCall(MatDestroy(&Aii));
  PetscCall(MatDestroy(&Ais));
  PetscCall(MatDestroy(&Asi));
  PetscCall(MatDestroy(&A));
  PetscCall(ISDestroy(&is));
  PetscCall(ISDestroy(&isint));
  PetscCall(ISDestroy(&issurf));
  PetscCall(MatDestroy(&Xint));
  PetscCall(MatDestroy(&Xsurf));
  PetscFunctionReturn(0);
}

/*@
   PCExoticSetType - Sets the type of coarse grid interpolation to use

   Logically Collective

   Input Parameters:
+  pc - the preconditioner context
-  type - either PC_EXOTIC_FACE or PC_EXOTIC_WIREBASKET (defaults to face)

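   Options Database Key:
.  -pc_exotic_type <face,wirebasket> - sets the type of coarse grid interpolation
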
   Notes:
   The face-based interpolation has 1 degree of freedom per face and ignores the
   edge and vertex values completely in the coarse problem. For any seven point
   stencil the interpolation of a constant on all faces into the interior is that constant.

   The wirebasket interpolation has 1 degree of freedom per vertex, per edge and
   per face. A constant on the subdomain boundary is interpolated as that constant
   in the interior of the domain.

   The coarse grid matrix is obtained via the Galerkin computation A_c = R A R^T, hence
   if A is nonsingular then A_c is also nonsingular.

   Both interpolations are suitable only for scalar problems.

   Level: intermediate

.seealso: `PCEXOTIC`, `PCExoticType`
@*/
PetscErrorCode PCExoticSetType(PC pc, PCExoticType type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(pc, type, 2);
  PetscTryMethod(pc, "PCExoticSetType_C", (PC, PCExoticType), (pc, type));
  PetscFunctionReturn(0);
}

static PetscErrorCode PCExoticSetType_Exotic(PC pc, PCExoticType type)
{
  PC_MG     *mg  = (PC_MG *)pc->data;
  PC_Exotic *ctx = (PC_Exotic *)mg->innerctx;

  PetscFunctionBegin;
  ctx->type = type;
  PetscFunctionReturn(0);
}

PetscErrorCode PCSetUp_Exotic(PC pc)
{
  Mat        A;
  PC_MG     *mg    = (PC_MG *)pc->data;
  PC_Exotic *ex    = (PC_Exotic *)mg->innerctx;
  MatReuse   reuse = (ex->P) ? MAT_REUSE_MATRIX : MAT_INITIAL_MATRIX;

  PetscFunctionBegin;
  PetscCheck(pc->dm, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Need to call PCSetDM() before using this PC");
  PetscCall(PCGetOperators(pc, NULL, &A));
  if (ex->type == PC_EXOTIC_FACE) {
    PetscCall(DMDAGetFaceInterpolation(pc, pc->dm, ex, A, reuse, &ex->P));
  } else if (ex->type == PC_EXOTIC_WIREBASKET) {
    PetscCall(DMDAGetWireBasketInterpolation(pc, pc->dm, ex, A, reuse, &ex->P));
  } else SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_PLIB, "Unknown exotic coarse space %d", ex->type);
  PetscCall(PCMGSetInterpolation(pc, 1, ex->P));
  /* if the PC has an attached DM we must remove it or the PCMG will use it to compute incorrectly sized vectors and interpolations */
  PetscCall(PCSetDM(pc, NULL));
  PetscCall(PCSetUp_MG(pc));
  PetscFunctionReturn(0);
}

PetscErrorCode PCDestroy_Exotic(PC pc)
{
  PC_MG     *mg  = (PC_MG *)pc->data;
  PC_Exotic *ctx = (PC_Exotic *)mg->innerctx;

  PetscFunctionBegin;
  PetscCall(MatDestroy(&ctx->P));
  PetscCall(KSPDestroy(&ctx->ksp));
  PetscCall(PetscFree(ctx));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCExoticSetType_C", NULL));
  PetscCall(PCDestroy_MG(pc));
  PetscFunctionReturn(0);
}

PetscErrorCode PCView_Exotic(PC pc, PetscViewer viewer)
{
  PC_MG     *mg = (PC_MG *)pc->data;
  PetscBool  iascii;
  PC_Exotic *ctx = (PC_Exotic *)mg->innerctx;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  if (iascii) {
    PetscCall(PetscViewerASCIIPrintf(viewer, "    Exotic type = %s\n", PCExoticTypes[ctx->type]));
    if (ctx->directSolve) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "      Using direct solver to construct interpolation\n"));
    } else {
      PetscViewer sviewer;
      PetscMPIInt rank;

      PetscCall(PetscViewerASCIIPrintf(viewer, "      Using iterative solver to construct interpolation\n"));
      PetscCall(PetscViewerASCIIPushTab(viewer));
      PetscCall(PetscViewerASCIIPushTab(viewer)); /* should not need to push this twice? */
      PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
      PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
      if (rank == 0) PetscCall(KSPView(ctx->ksp, sviewer));
      PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
      PetscCall(PetscViewerASCIIPopTab(viewer));
      PetscCall(PetscViewerASCIIPopTab(viewer));
    }
  }
  PetscCall(PCView_MG(pc, viewer));
  PetscFunctionReturn(0);
}

PetscErrorCode PCSetFromOptions_Exotic(PC pc, PetscOptionItems *PetscOptionsObject)
{
  PetscBool    flg;
  PC_MG       *mg = (PC_MG *)pc->data;
  PCExoticType mgctype;
  PC_Exotic   *ctx = (PC_Exotic *)mg->innerctx;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "Exotic coarse space options");
  PetscCall(PetscOptionsEnum("-pc_exotic_type", "face or wirebasket", "PCExoticSetType", PCExoticTypes, (PetscEnum)ctx->type, (PetscEnum *)&mgctype, &flg));
  if (flg) PetscCall(PCExoticSetType(pc, mgctype));
  PetscCall(PetscOptionsBool("-pc_exotic_direct_solver", "use direct solver to construct interpolation", "None", ctx->directSolve, &ctx->directSolve, NULL));
  if (!ctx->directSolve) {
    if (!ctx->ksp) {
      const char *prefix;
      PetscCall(KSPCreate(PETSC_COMM_SELF, &ctx->ksp));
      PetscCall(KSPSetErrorIfNotConverged(ctx->ksp, pc->erroriffailure));
      PetscCall(PetscObjectIncrementTabLevel((PetscObject)ctx->ksp, (PetscObject)pc, 1));
      PetscCall(PCGetOptionsPrefix(pc, &prefix));
      PetscCall(KSPSetOptionsPrefix(ctx->ksp, prefix));
      PetscCall(KSPAppendOptionsPrefix(ctx->ksp, "exotic_"));
    }
    PetscCall(KSPSetFromOptions(ctx->ksp));
  }
  PetscOptionsHeadEnd();
  PetscFunctionReturn(0);
}

/*MC
     PCEXOTIC - Two level overlapping Schwarz preconditioner with exotic (non-standard) coarse grid spaces

     This uses the `PCMG` infrastructure restricted to two levels and the face and wirebasket based coarse
   grid spaces.

   Level: advanced

   Notes:
   Must be used with `DMDA`

   By default this uses `KSPGMRES` as the fine grid smoother, so it should be used with `KSPFGMRES` or the smoother should be changed to not use `KSPGMRES`

   References:
+  * - These coarse grid spaces originate in the work of Bramble, Pasciak and Schatz, "The Construction
   of Preconditioners for Elliptic Problems by Substructuring IV", Mathematics of Computation, volume 53, 1989.
.  * - They were generalized slightly in "Domain Decomposition Method for Linear Elasticity", Ph.D. thesis, Barry Smith,
   New York University, 1990.
.  * - They were then explored in great detail in Dryja, Smith, Widlund, "Schwarz Analysis
   of Iterative Substructuring Methods for Elliptic Problems in Three Dimensions", SIAM Journal on Numerical
   Analysis, volume 31, 1994. These were developed in the context of iterative substructuring preconditioners.
.  * - They were then ingeniously applied as coarse grid spaces for overlapping Schwarz methods by Dohrmann and Widlund.
   They refer to them as GDSW (generalized Dryja, Smith, Widlund preconditioners). See, for example,
   Clark R. Dohrmann, Axel Klawonn, and Olof B. Widlund. Extending theory for domain decomposition algorithms to irregular subdomains. In Ulrich Langer, Marco
   Discacciati, David Keyes, Olof Widlund, and Walter Zulehner, editors, Proceedings
   of the 17th International Conference on Domain Decomposition Methods in
   Science and Engineering, held in Strobl, Austria, 2006, number 60 in
   Springer Verlag, Lecture Notes in Computational Science and Engineering, 2007.
.  * - Clark R. Dohrmann, Axel Klawonn, and Olof B. Widlund. A family of energy minimizing coarse spaces for overlapping Schwarz preconditioners. In Ulrich Langer,
   Marco Discacciati, David Keyes, Olof Widlund, and Walter Zulehner, editors, Proceedings
   of the 17th International Conference on Domain Decomposition Methods
   in Science and Engineering, held in Strobl, Austria, 2006, number 60 in
   Springer Verlag, Lecture Notes in Computational Science and Engineering, 2007.
.  * - Clark R. Dohrmann, Axel Klawonn, and Olof B. Widlund. Domain decomposition
   for less regular subdomains: Overlapping Schwarz in two dimensions. SIAM J.
   Numer. Anal., 46(4), 2008.
-  * - Clark R. Dohrmann and Olof B. Widlund. An overlapping Schwarz
   algorithm for almost incompressible elasticity. Technical Report
   TR2008 912, Department of Computer Science, Courant Institute
   of Mathematical Sciences, New York University, May 2008. URL:

   The usual `PCMG` options are supported, such as -mg_levels_pc_type <type>, -mg_coarse_pc_type <type>, and -pc_mg_type <type>

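   Example Usage:
   An illustrative command line (the example program is an assumption; any 3d `DMDA` based code, such as
   src/ksp/ksp/tutorials/ex45.c, could be used and the options adjusted accordingly):
.vb
   ./ex45 -ksp_type fgmres -pc_type exotic -pc_exotic_type wirebasket -pc_exotic_direct_solver
.ve
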
.seealso: `PCMG`, `PCSetDM()`, `PCExoticType`, `PCExoticSetType()`
M*/

PETSC_EXTERN PetscErrorCode PCCreate_Exotic(PC pc)
{
  PC_Exotic *ex;
  PC_MG     *mg;

  PetscFunctionBegin;
  /* if type was previously mg, we must manually destroy it because the call to PCSetType(pc, PCMG) will not destroy it */
  PetscTryTypeMethod(pc, destroy);
  pc->data = NULL;

  PetscCall(PetscFree(((PetscObject)pc)->type_name));
  ((PetscObject)pc)->type_name = NULL;

  PetscCall(PCSetType(pc, PCMG));
  PetscCall(PCMGSetLevels(pc, 2, NULL));
  PetscCall(PCMGSetGalerkin(pc, PC_MG_GALERKIN_PMAT));
  PetscCall(PetscNew(&ex));
  ex->type     = PC_EXOTIC_FACE;
  mg           = (PC_MG *)pc->data;
  mg->innerctx = ex;

  pc->ops->setfromoptions = PCSetFromOptions_Exotic;
  pc->ops->view           = PCView_Exotic;
  pc->ops->destroy        = PCDestroy_Exotic;
  pc->ops->setup          = PCSetUp_Exotic;

  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCExoticSetType_C", PCExoticSetType_Exotic));
  PetscFunctionReturn(0);
}