xref: /petsc/src/ksp/pc/impls/hpddm/pchpddm.cxx (revision fc884c7aed7fd26fa7ebc03ede74b1d61ba5d115)
1 #include <petsc/private/dmimpl.h>
2 #include <petsc/private/matimpl.h>
3 #include <petsc/private/petschpddm.h> /*I "petscpc.h" I*/
4 #include <petsc/private/pcimpl.h>     /* this must be included after petschpddm.h so that _PCIMPL_H is not defined            */
5                                       /* otherwise, it is assumed that one is compiling libhpddm_petsc => circular dependency */
6 #if PetscDefined(HAVE_FORTRAN)
7   #include <petsc/private/fortranimpl.h>
8 #endif
9 
/* entry point resolved at runtime from the HPDDM shared library; used to set up the coarser levels */
static PetscErrorCode (*loadedSym)(HPDDM::Schwarz<PetscScalar> *const, IS, Mat, Mat, Mat, std::vector<Vec>, PC_HPDDM_Level **const) = NULL;

/* one-time package registration guard */
static PetscBool PCHPDDMPackageInitialized = PETSC_FALSE;

/* logging events; the per-level arrays are used when -pc_hpddm_log_separate is set */
PetscLogEvent PC_HPDDM_Strc;
PetscLogEvent PC_HPDDM_PtAP;
PetscLogEvent PC_HPDDM_PtBP;
PetscLogEvent PC_HPDDM_Next;
PetscLogEvent PC_HPDDM_SetUp[PETSC_PCHPDDM_MAXLEVELS];
PetscLogEvent PC_HPDDM_Solve[PETSC_PCHPDDM_MAXLEVELS];

/* string table for PCHPDDMCoarseCorrectionType, in enum order, terminated per PetscOptionsEnum() convention */
const char *const PCHPDDMCoarseCorrectionTypes[] = {"DEFLATED", "ADDITIVE", "BALANCED", "PCHPDDMCoarseCorrectionType", "PC_HPDDM_COARSE_CORRECTION_", NULL};
22 
23 static PetscErrorCode PCReset_HPDDM(PC pc)
24 {
25   PC_HPDDM *data = (PC_HPDDM *)pc->data;
26   PetscInt  i;
27 
28   PetscFunctionBegin;
29   if (data->levels) {
30     for (i = 0; i < PETSC_PCHPDDM_MAXLEVELS && data->levels[i]; ++i) {
31       PetscCall(KSPDestroy(&data->levels[i]->ksp));
32       PetscCall(PCDestroy(&data->levels[i]->pc));
33       PetscCall(PetscFree(data->levels[i]));
34     }
35     PetscCall(PetscFree(data->levels));
36   }
37 
38   PetscCall(ISDestroy(&data->is));
39   PetscCall(MatDestroy(&data->aux));
40   PetscCall(MatDestroy(&data->B));
41   PetscCall(VecDestroy(&data->normal));
42   data->correction = PC_HPDDM_COARSE_CORRECTION_DEFLATED;
43   data->Neumann    = PETSC_BOOL3_UNKNOWN;
44   data->deflation  = PETSC_FALSE;
45   data->setup      = NULL;
46   data->setup_ctx  = NULL;
47   PetscFunctionReturn(0);
48 }
49 
50 static PetscErrorCode PCDestroy_HPDDM(PC pc)
51 {
52   PC_HPDDM *data = (PC_HPDDM *)pc->data;
53 
54   PetscFunctionBegin;
55   PetscCall(PCReset_HPDDM(pc));
56   PetscCall(PetscFree(data));
57   PetscCall(PetscObjectChangeTypeName((PetscObject)pc, NULL));
58   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHPDDMSetAuxiliaryMat_C", NULL));
59   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHPDDMHasNeumannMat_C", NULL));
60   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHPDDMSetRHSMat_C", NULL));
61   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHPDDMSetCoarseCorrectionType_C", NULL));
62   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHPDDMGetCoarseCorrectionType_C", NULL));
63   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHPDDMSetSTShareSubKSP_C", NULL));
64   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHPDDMGetSTShareSubKSP_C", NULL));
65   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHPDDMSetDeflationMat_C", NULL));
66   PetscFunctionReturn(0);
67 }
68 
/* stores the (is, A) pair on the PC; `deflation` indicates whether A is a user-supplied deflation matrix
   rather than an auxiliary matrix for the concurrent eigenproblems (cf. PCView_HPDDM()) */
static inline PetscErrorCode PCHPDDMSetAuxiliaryMat_Private(PC pc, IS is, Mat A, PetscBool deflation)
{
  PC_HPDDM *data = (PC_HPDDM *)pc->data;

  PetscFunctionBegin;
  if (is) {
    /* reference before destroy so that is == data->is is handled safely */
    PetscCall(PetscObjectReference((PetscObject)is));
    if (data->is) { /* new overlap definition resets the PC */
      PetscCall(PCReset_HPDDM(pc));
      pc->setfromoptionscalled = 0;
      pc->setupcalled          = 0;
    }
    PetscCall(ISDestroy(&data->is));
    data->is = is;
  }
  if (A) {
    /* same reference-then-destroy pattern for the auxiliary matrix */
    PetscCall(PetscObjectReference((PetscObject)A));
    PetscCall(MatDestroy(&data->aux));
    data->aux = A;
  }
  data->deflation = deflation;
  PetscFunctionReturn(0);
}
92 
93 static inline PetscErrorCode PCHPDDMSetAuxiliaryMatNormal_Private(PC pc, Mat A, Mat N, Mat *B, const char *pcpre)
94 {
95   PC_HPDDM *data = (PC_HPDDM *)pc->data;
96   Mat      *splitting, *sub, aux;
97   IS        is, cols[2], rows;
98   PetscReal norm;
99   PetscBool flg;
100   char      type[256] = {}; /* same size as in src/ksp/pc/interface/pcset.c */
101 
102   PetscFunctionBegin;
103   PetscCall(MatConvert(N, MATAIJ, MAT_INITIAL_MATRIX, B));
104   PetscCall(ISCreateStride(PETSC_COMM_SELF, A->cmap->n, A->cmap->rstart, 1, cols));
105   PetscCall(ISCreateStride(PETSC_COMM_SELF, A->rmap->N, 0, 1, &rows));
106   PetscCall(MatSetOption(A, MAT_SUBMAT_SINGLEIS, PETSC_TRUE));
107   PetscCall(MatIncreaseOverlap(*B, 1, cols, 1));
108   PetscCall(MatCreateSubMatrices(A, 1, &rows, cols, MAT_INITIAL_MATRIX, &splitting));
109   PetscCall(ISCreateStride(PETSC_COMM_SELF, A->cmap->n, A->cmap->rstart, 1, &is));
110   PetscCall(ISEmbed(*cols, is, PETSC_TRUE, cols + 1));
111   PetscCall(ISDestroy(&is));
112   PetscCall(MatCreateSubMatrices(*splitting, 1, &rows, cols + 1, MAT_INITIAL_MATRIX, &sub));
113   PetscCall(ISDestroy(cols + 1));
114   PetscCall(MatFindZeroRows(*sub, &is));
115   PetscCall(MatDestroySubMatrices(1, &sub));
116   PetscCall(ISDestroy(&rows));
117   PetscCall(MatSetOption(*splitting, MAT_KEEP_NONZERO_PATTERN, PETSC_TRUE));
118   PetscCall(MatZeroRowsIS(*splitting, is, 0.0, NULL, NULL));
119   PetscCall(ISDestroy(&is));
120   PetscCall(PetscOptionsGetString(NULL, pcpre, "-pc_hpddm_levels_1_sub_pc_type", type, sizeof(type), NULL));
121   PetscCall(PetscStrcmp(type, PCQR, &flg));
122   if (!flg) {
123     Mat conjugate = *splitting;
124     if (PetscDefined(USE_COMPLEX)) {
125       PetscCall(MatDuplicate(*splitting, MAT_COPY_VALUES, &conjugate));
126       PetscCall(MatConjugate(conjugate));
127     }
128     PetscCall(MatTransposeMatMult(conjugate, *splitting, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &aux));
129     if (PetscDefined(USE_COMPLEX)) PetscCall(MatDestroy(&conjugate));
130     PetscCall(MatNorm(aux, NORM_FROBENIUS, &norm));
131     PetscCall(MatSetOption(aux, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE));
132     PetscCall(MatShift(aux, PETSC_SMALL * norm));
133   } else {
134     PetscBool flg;
135     PetscCall(PetscObjectTypeCompare((PetscObject)N, MATNORMAL, &flg));
136     if (flg) PetscCall(MatCreateNormal(*splitting, &aux));
137     else PetscCall(MatCreateNormalHermitian(*splitting, &aux));
138   }
139   PetscCall(MatDestroySubMatrices(1, &splitting));
140   PetscCall(PCHPDDMSetAuxiliaryMat(pc, *cols, aux, NULL, NULL));
141   data->Neumann = PETSC_BOOL3_TRUE;
142   PetscCall(ISDestroy(cols));
143   PetscCall(MatDestroy(&aux));
144   PetscFunctionReturn(0);
145 }
146 
/* implementation behind the composed method "PCHPDDMSetAuxiliaryMat_C": stores (is, A) and,
   when provided, the user callback used to regenerate the auxiliary matrix with its context */
static PetscErrorCode PCHPDDMSetAuxiliaryMat_HPDDM(PC pc, IS is, Mat A, PetscErrorCode (*setup)(Mat, PetscReal, Vec, Vec, PetscReal, IS, void *), void *setup_ctx)
{
  PC_HPDDM *data = (PC_HPDDM *)pc->data;

  PetscFunctionBegin;
  PetscCall(PCHPDDMSetAuxiliaryMat_Private(pc, is, A, PETSC_FALSE));
  if (setup) { /* a NULL setup leaves any previously registered callback untouched */
    data->setup     = setup;
    data->setup_ctx = setup_ctx;
  }
  PetscFunctionReturn(0);
}
159 
/*@
     PCHPDDMSetAuxiliaryMat - Sets the auxiliary matrix used by `PCHPDDM` for the concurrent GenEO problems at the finest level. As an example, in a finite element context with nonoverlapping subdomains plus (overlapping) ghost elements, this could be the unassembled (Neumann) local overlapping operator. As opposed to the assembled (Dirichlet) local overlapping operator obtained by summing neighborhood contributions at the interface of ghost elements.

   Input Parameters:
+     pc - preconditioner context
.     is - index set of the local auxiliary, e.g., Neumann, matrix
.     A - auxiliary sequential matrix
.     setup - function for generating the auxiliary matrix
-     setup_ctx - context for setup

   Level: intermediate

.seealso: `PCHPDDM`, `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCHPDDMSetRHSMat()`, `MATIS`
@*/
PetscErrorCode PCHPDDMSetAuxiliaryMat(PC pc, IS is, Mat A, PetscErrorCode (*setup)(Mat, PetscReal, Vec, Vec, PetscReal, IS, void *), void *setup_ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (is) PetscValidHeaderSpecific(is, IS_CLASSID, 2);
  if (A) PetscValidHeaderSpecific(A, MAT_CLASSID, 3);
#if PetscDefined(HAVE_FORTRAN)
  /* map the Fortran sentinel values for "no callback"/"no context" to their C equivalents */
  if (reinterpret_cast<void *>(setup) == reinterpret_cast<void *>(PETSC_NULL_FUNCTION_Fortran)) setup = NULL;
  if (setup_ctx == PETSC_NULL_INTEGER_Fortran) setup_ctx = NULL;
#endif
  PetscTryMethod(pc, "PCHPDDMSetAuxiliaryMat_C", (PC, IS, Mat, PetscErrorCode(*)(Mat, PetscReal, Vec, Vec, PetscReal, IS, void *), void *), (pc, is, A, setup, setup_ctx));
  PetscFunctionReturn(0);
}
187 
/* implementation behind the composed method "PCHPDDMHasNeumannMat_C": records whether the auxiliary
   matrix is the local Neumann matrix, using the three-state PetscBool3 so "unset" stays distinguishable */
static PetscErrorCode PCHPDDMHasNeumannMat_HPDDM(PC pc, PetscBool has)
{
  PC_HPDDM *data = (PC_HPDDM *)pc->data;

  PetscFunctionBegin;
  data->Neumann = PetscBoolToBool3(has);
  PetscFunctionReturn(0);
}
196 
/*@
     PCHPDDMHasNeumannMat - Informs `PCHPDDM` that the `Mat` passed to `PCHPDDMSetAuxiliaryMat()` is the local Neumann matrix.

   Input Parameters:
+     pc - preconditioner context
-     has - Boolean value

   Level: intermediate

   Notes:
   This may be used to bypass a call to `MatCreateSubMatrices()` and to `MatConvert()` for `MATSBAIJ` matrices.

   If a `DMCreateNeumannOverlap()` implementation is available in the `DM` attached to the Pmat, or the Amat, or the `PC`, the flag is internally set to `PETSC_TRUE`. Its default value is otherwise `PETSC_FALSE`.

.seealso: `PCHPDDM`, `PCHPDDMSetAuxiliaryMat()`
@*/
PetscErrorCode PCHPDDMHasNeumannMat(PC pc, PetscBool has)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* no-op for PC implementations other than PCHPDDM, hence PetscTryMethod() and not PetscUseMethod() */
  PetscTryMethod(pc, "PCHPDDMHasNeumannMat_C", (PC, PetscBool), (pc, has));
  PetscFunctionReturn(0);
}
220 
/* implementation behind the composed method "PCHPDDMSetRHSMat_C": stores the right-hand side matrix B,
   referencing before destroying so that B == data->B is handled safely */
static PetscErrorCode PCHPDDMSetRHSMat_HPDDM(PC pc, Mat B)
{
  PC_HPDDM *data = (PC_HPDDM *)pc->data;

  PetscFunctionBegin;
  PetscCall(PetscObjectReference((PetscObject)B));
  PetscCall(MatDestroy(&data->B));
  data->B = B;
  PetscFunctionReturn(0);
}
231 
/*@
     PCHPDDMSetRHSMat - Sets the right-hand side matrix used by `PCHPDDM` for the concurrent GenEO problems at the finest level. Must be used in conjunction with `PCHPDDMSetAuxiliaryMat`(N), so that Nv = lambda Bv is solved using `EPSSetOperators`(N, B). It is assumed that N and B are provided using the same numbering. This provides a means to try more advanced methods such as GenEO-II or H-GenEO.

   Input Parameters:
+     pc - preconditioner context
-     B - right-hand side sequential matrix

   Level: advanced

.seealso: `PCHPDDMSetAuxiliaryMat()`, `PCHPDDM`
@*/
PetscErrorCode PCHPDDMSetRHSMat(PC pc, Mat B)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (B) { /* a NULL B is silently ignored */
    PetscValidHeaderSpecific(B, MAT_CLASSID, 2);
    PetscTryMethod(pc, "PCHPDDMSetRHSMat_C", (PC, Mat), (pc, B));
  }
  PetscFunctionReturn(0);
}
253 
/* parses all -pc_hpddm_* options: walks the levels from finest to coarsest, allocating a level
   structure per iteration and stopping as soon as no further coarsening is prescribed; then parses
   the coarsest-level and global options; finally frees level structures left over from a previous call */
static PetscErrorCode PCSetFromOptions_HPDDM(PC pc, PetscOptionItems *PetscOptionsObject)
{
  PC_HPDDM                   *data   = (PC_HPDDM *)pc->data;
  PC_HPDDM_Level            **levels = data->levels;
  char                        prefix[256];
  int                         i = 1; /* 1-based level counter, matches the -pc_hpddm_levels_%d_ prefixes */
  PetscMPIInt                 size, previous;
  PetscInt                    n;
  PCHPDDMCoarseCorrectionType type;
  PetscBool                   flg = PETSC_TRUE, set;

  PetscFunctionBegin;
  if (!data->levels) {
    PetscCall(PetscCalloc1(PETSC_PCHPDDM_MAXLEVELS, &levels));
    data->levels = levels;
  }
  PetscOptionsHeadBegin(PetscOptionsObject, "PCHPDDM options");
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
  previous = size; /* number of processes of the previously parsed (finer) level */
  while (i < PETSC_PCHPDDM_MAXLEVELS) {
    PetscInt p = 1;

    if (!data->levels[i - 1]) PetscCall(PetscNew(data->levels + i - 1));
    data->levels[i - 1]->parent = data;
    /* if the previous level has a single process, it is not possible to coarsen further */
    if (previous == 1 || !flg) break;
    data->levels[i - 1]->nu        = 0;
    data->levels[i - 1]->threshold = -1.0;
    PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "-pc_hpddm_levels_%d_eps_nev", i));
    PetscCall(PetscOptionsBoundedInt(prefix, "Local number of deflation vectors computed by SLEPc", "EPSSetDimensions", data->levels[i - 1]->nu, &data->levels[i - 1]->nu, NULL, 0));
    PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "-pc_hpddm_levels_%d_eps_threshold", i));
    PetscCall(PetscOptionsReal(prefix, "Local threshold for selecting deflation vectors returned by SLEPc", "PCHPDDM", data->levels[i - 1]->threshold, &data->levels[i - 1]->threshold, NULL));
    if (i == 1) {
      /* sharing the subdomain KSP only makes sense on the finest level */
      PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "-pc_hpddm_levels_1_st_share_sub_ksp"));
      PetscCall(PetscOptionsBool(prefix, "Shared KSP between SLEPc ST and the fine-level subdomain solver", "PCHPDDMSetSTShareSubKSP", PETSC_FALSE, &data->share, NULL));
    }
    /* if there is no prescribed coarsening, just break out of the loop */
    if (data->levels[i - 1]->threshold <= 0.0 && data->levels[i - 1]->nu <= 0 && !(data->deflation && i == 1)) break;
    else {
      ++i;
      PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "-pc_hpddm_levels_%d_eps_nev", i));
      PetscCall(PetscOptionsHasName(PetscOptionsObject->options, PetscOptionsObject->prefix, prefix, &flg));
      if (!flg) {
        PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "-pc_hpddm_levels_%d_eps_threshold", i));
        PetscCall(PetscOptionsHasName(PetscOptionsObject->options, PetscOptionsObject->prefix, prefix, &flg));
      }
      if (flg) {
        /* if there are coarsening options for the next level, then register it  */
        /* otherwise, don't to avoid having both options levels_N_p and coarse_p */
        PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "-pc_hpddm_levels_%d_p", i));
        PetscCall(PetscOptionsRangeInt(prefix, "Number of processes used to assemble the coarse operator at this level", "PCHPDDM", p, &p, &flg, 1, PetscMax(1, previous / 2)));
        previous = p;
      }
    }
  }
  data->N = i; /* total number of levels */
  n       = 1;
  if (i > 1) { /* options below are only meaningful with at least one coarse level */
    PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "-pc_hpddm_coarse_p"));
    PetscCall(PetscOptionsRangeInt(prefix, "Number of processes used to assemble the coarsest operator", "PCHPDDM", n, &n, NULL, 1, PetscMax(1, previous / 2)));
#if PetscDefined(HAVE_MUMPS)
    PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "pc_hpddm_coarse_")); /* options prefix, hence no leading dash */
    PetscCall(PetscOptionsHasName(NULL, prefix, "-mat_mumps_use_omp_threads", &flg));
    if (flg) {
      /* -mat_mumps_use_omp_threads is only valid with the MUMPS solver; check and rescale the process count */
      char type[64];                                                                                     /* same size as in src/ksp/pc/impls/factor/factimpl.c */
      PetscCall(PetscStrcpy(type, n > 1 && PetscDefined(HAVE_MUMPS) ? MATSOLVERMUMPS : MATSOLVERPETSC)); /* default solver for a MatMPIAIJ or a MatSeqAIJ */
      PetscCall(PetscOptionsGetString(NULL, prefix, "-pc_factor_mat_solver_type", type, sizeof(type), NULL));
      PetscCall(PetscStrcmp(type, MATSOLVERMUMPS, &flg));
      PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "-%smat_mumps_use_omp_threads and -%spc_factor_mat_solver_type != %s", prefix, prefix, MATSOLVERMUMPS);
      size = n;
      n    = -1;
      PetscCall(PetscOptionsGetInt(NULL, prefix, "-mat_mumps_use_omp_threads", &n, NULL));
      PetscCheck(n >= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Need to specify a positive integer for -%smat_mumps_use_omp_threads", prefix);
      PetscCheck(n * size <= previous, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "%d MPI process%s x %d OpenMP thread%s greater than %d available MPI process%s for the coarsest operator", (int)size, size > 1 ? "es" : "", (int)n, n > 1 ? "s" : "", (int)previous, previous > 1 ? "es" : "");
    }
#endif
    PetscCall(PetscOptionsEnum("-pc_hpddm_coarse_correction", "Type of coarse correction applied each iteration", "PCHPDDMSetCoarseCorrectionType", PCHPDDMCoarseCorrectionTypes, (PetscEnum)data->correction, (PetscEnum *)&type, &flg));
    if (flg) PetscCall(PCHPDDMSetCoarseCorrectionType(pc, type));
    PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "-pc_hpddm_has_neumann"));
    PetscCall(PetscOptionsBool(prefix, "Is the auxiliary Mat the local Neumann matrix?", "PCHPDDMHasNeumannMat", PetscBool3ToBool(data->Neumann), &flg, &set));
    if (set) data->Neumann = PetscBoolToBool3(flg);
    data->log_separate = PETSC_FALSE;
    if (PetscDefined(USE_LOG)) {
      PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "-pc_hpddm_log_separate"));
      PetscCall(PetscOptionsBool(prefix, "Log events level by level instead of inside PCSetUp()/KSPSolve()", NULL, data->log_separate, &data->log_separate, NULL));
    }
  }
  PetscOptionsHeadEnd();
  /* free level structures allocated in a previous call but no longer used with the current options */
  while (i < PETSC_PCHPDDM_MAXLEVELS && data->levels[i]) PetscCall(PetscFree(data->levels[i++]));
  PetscFunctionReturn(0);
}
345 
/* applies the full preconditioner by solving with the finest-level KSP; the hierarchy itself is
   traversed inside that solve via the shell PC set up in PCSetUp_HPDDMShell() */
static PetscErrorCode PCApply_HPDDM(PC pc, Vec x, Vec y)
{
  PC_HPDDM *data = (PC_HPDDM *)pc->data;

  PetscFunctionBegin;
  PetscCall(PetscCitationsRegister(HPDDMCitation, &HPDDMCite));
  PetscCheck(data->levels[0]->ksp, PETSC_COMM_SELF, PETSC_ERR_PLIB, "No KSP attached to PCHPDDM");
  if (data->log_separate) PetscCall(PetscLogEventBegin(PC_HPDDM_Solve[0], data->levels[0]->ksp, 0, 0, 0)); /* coarser-level events are directly triggered in HPDDM */
  PetscCall(KSPSolve(data->levels[0]->ksp, x, y));
  if (data->log_separate) PetscCall(PetscLogEventEnd(PC_HPDDM_Solve[0], data->levels[0]->ksp, 0, 0, 0));
  PetscFunctionReturn(0);
}
358 
/* block-of-vectors variant of PCApply_HPDDM(), forwarding to KSPMatSolve() on the finest level */
static PetscErrorCode PCMatApply_HPDDM(PC pc, Mat X, Mat Y)
{
  PC_HPDDM *data = (PC_HPDDM *)pc->data;

  PetscFunctionBegin;
  PetscCall(PetscCitationsRegister(HPDDMCitation, &HPDDMCite));
  PetscCheck(data->levels[0]->ksp, PETSC_COMM_SELF, PETSC_ERR_PLIB, "No KSP attached to PCHPDDM");
  PetscCall(KSPMatSolve(data->levels[0]->ksp, X, Y));
  PetscFunctionReturn(0);
}
369 
/*@C
     PCHPDDMGetComplexities - Computes the grid and operator complexities.

   Input Parameter:
.     pc - preconditioner context

   Output Parameters:
+     gc - grid complexity = sum_i(m_i) / m_1
-     oc - operator complexity = sum_i(nnz_i) / nnz_1

   Level: advanced

.seealso: `PCMGGetGridComplexity()`, `PCHPDDM`, `PCHYPRE`, `PCGAMG`
@*/
static PetscErrorCode PCHPDDMGetComplexities(PC pc, PetscReal *gc, PetscReal *oc)
{
  PC_HPDDM      *data = (PC_HPDDM *)pc->data;
  MatInfo        info;
  PetscInt       n, m;
  PetscLogDouble accumulate[2]{}, nnz1 = 1.0, m1 = 1.0; /* [0] accumulates sizes, [1] accumulates nonzeros */

  PetscFunctionBegin;
  for (n = 0, *gc = 0, *oc = 0; n < data->N; ++n) {
    if (data->levels[n]->ksp) {
      Mat P, A;
      PetscCall(KSPGetOperators(data->levels[n]->ksp, NULL, &P));
      PetscCall(MatGetSize(P, &m, NULL));
      accumulate[0] += m;
      if (n == 0) {
        /* on the finest level, an implicit MATNORMAL(HERMITIAN) Pmat is converted to AIJ so that
           MatGetInfo() reports meaningful nonzero counts; either way P holds an extra reference here */
        PetscBool flg;
        PetscCall(PetscObjectTypeCompareAny((PetscObject)P, &flg, MATNORMAL, MATNORMALHERMITIAN, ""));
        if (flg) {
          PetscCall(MatConvert(P, MATAIJ, MAT_INITIAL_MATRIX, &A));
          P = A;
        } else PetscCall(PetscObjectReference((PetscObject)P));
      }
      if (P->ops->getinfo) { /* not all Mat implementations provide MatGetInfo() */
        PetscCall(MatGetInfo(P, MAT_GLOBAL_SUM, &info));
        accumulate[1] += info.nz_used;
      }
      if (n == 0) {
        m1 = m;
        if (P->ops->getinfo) nnz1 = info.nz_used;
        PetscCall(MatDestroy(&P)); /* drops the reference (or the converted copy) taken above */
      }
    }
  }
  *gc = static_cast<PetscReal>(accumulate[0] / m1);
  *oc = static_cast<PetscReal>(accumulate[1] / nnz1);
  PetscFunctionReturn(0);
}
421 
/* ASCII viewer for PCHPDDM: prints the hierarchy summary, then views each level's KSP/PC;
   coarser levels may live on a strict subset of processes, hence the subcommunicator dance below */
static PetscErrorCode PCView_HPDDM(PC pc, PetscViewer viewer)
{
  PC_HPDDM    *data = (PC_HPDDM *)pc->data;
  PetscViewer  subviewer;
  PetscSubcomm subcomm;
  PetscReal    oc, gc;
  PetscInt     i, tabs;
  PetscMPIInt  size, color, rank;
  PetscBool    ascii;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &ascii));
  if (ascii) {
    PetscCall(PetscViewerASCIIPrintf(viewer, "level%s: %" PetscInt_FMT "\n", data->N > 1 ? "s" : "", data->N));
    PetscCall(PCHPDDMGetComplexities(pc, &gc, &oc));
    if (data->N > 1) {
      if (!data->deflation) {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Neumann matrix attached? %s\n", PetscBools[PetscBool3ToBool(data->Neumann)]));
        PetscCall(PetscViewerASCIIPrintf(viewer, "shared subdomain KSP between SLEPc and PETSc? %s\n", PetscBools[data->share]));
      } else PetscCall(PetscViewerASCIIPrintf(viewer, "user-supplied deflation matrix\n"));
      PetscCall(PetscViewerASCIIPrintf(viewer, "coarse correction: %s\n", PCHPDDMCoarseCorrectionTypes[data->correction]));
      PetscCall(PetscViewerASCIIPrintf(viewer, "on process #0, value%s (+ threshold%s if available) for selecting deflation vectors:", data->N > 2 ? "s" : "", data->N > 2 ? "s" : ""));
      /* temporarily disable tabs so that the per-level values are printed on a single line */
      PetscCall(PetscViewerASCIIGetTab(viewer, &tabs));
      PetscCall(PetscViewerASCIISetTab(viewer, 0));
      for (i = 1; i < data->N; ++i) {
        PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, data->levels[i - 1]->nu));
        if (data->levels[i - 1]->threshold > -0.1) PetscCall(PetscViewerASCIIPrintf(viewer, " (%g)", (double)data->levels[i - 1]->threshold));
      }
      PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
      PetscCall(PetscViewerASCIISetTab(viewer, tabs));
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, "grid and operator complexities: %g %g\n", (double)gc, (double)oc));
    if (data->levels[0]->ksp) {
      PetscCall(KSPView(data->levels[0]->ksp, viewer));
      if (data->levels[0]->pc) PetscCall(PCView(data->levels[0]->pc, viewer));
      for (i = 1; i < data->N; ++i) {
        /* split the communicator into processes that hold this level (color 1) and those that do not */
        if (data->levels[i]->ksp) color = 1;
        else color = 0;
        PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
        PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
        PetscCall(PetscSubcommCreate(PetscObjectComm((PetscObject)pc), &subcomm));
        PetscCall(PetscSubcommSetNumber(subcomm, PetscMin(size, 2)));
        PetscCall(PetscSubcommSetTypeGeneral(subcomm, color, rank));
        PetscCall(PetscViewerASCIIPushTab(viewer));
        PetscCall(PetscViewerGetSubViewer(viewer, PetscSubcommChild(subcomm), &subviewer));
        if (color == 1) { /* only the processes that own the level view it */
          PetscCall(KSPView(data->levels[i]->ksp, subviewer));
          if (data->levels[i]->pc) PetscCall(PCView(data->levels[i]->pc, subviewer));
          PetscCall(PetscViewerFlush(subviewer));
        }
        PetscCall(PetscViewerRestoreSubViewer(viewer, PetscSubcommChild(subcomm), &subviewer));
        PetscCall(PetscViewerASCIIPopTab(viewer));
        PetscCall(PetscSubcommDestroy(&subcomm));
        PetscCall(PetscViewerFlush(viewer));
      }
    }
  }
  PetscFunctionReturn(0);
}
481 
482 static PetscErrorCode PCPreSolve_HPDDM(PC pc, KSP ksp, Vec, Vec)
483 {
484   PC_HPDDM *data = (PC_HPDDM *)pc->data;
485   PetscBool flg;
486   Mat       A;
487 
488   PetscFunctionBegin;
489   if (ksp) {
490     PetscCall(PetscObjectTypeCompare((PetscObject)ksp, KSPLSQR, &flg));
491     if (flg && !data->normal) {
492       PetscCall(KSPGetOperators(ksp, &A, NULL));
493       PetscCall(MatCreateVecs(A, NULL, &data->normal)); /* temporary Vec used in PCApply_HPDDMShell() for coarse grid corrections */
494     }
495   }
496   PetscFunctionReturn(0);
497 }
498 
/* sets up the PCSHELL of one level: propagates the prefix and operators of the level's KSP to the
   smoother, and allocates the work vectors used by PCApply_HPDDMShell() (done only once) */
static PetscErrorCode PCSetUp_HPDDMShell(PC pc)
{
  PC_HPDDM_Level *ctx;
  Mat             A, P;
  Vec             x;
  const char     *pcpre;

  PetscFunctionBegin;
  PetscCall(PCShellGetContext(pc, &ctx));
  PetscCall(KSPGetOptionsPrefix(ctx->ksp, &pcpre));
  PetscCall(KSPGetOperators(ctx->ksp, &A, &P));
  /* smoother */
  PetscCall(PCSetOptionsPrefix(ctx->pc, pcpre));
  PetscCall(PCSetOperators(ctx->pc, A, P));
  if (!ctx->v[0]) {
    /* v[0]: one Vec in the HPDDM numbering (duplicated from ctx->D); v[1]: two Vecs in the PETSc numbering */
    PetscCall(VecDuplicateVecs(ctx->D, 1, &ctx->v[0]));
    /* ctx->D is only needed beyond this point when PetscScalar and PetscReal differ — TODO confirm */
    if (!std::is_same<PetscScalar, PetscReal>::value) PetscCall(VecDestroy(&ctx->D));
    PetscCall(MatCreateVecs(A, &x, NULL));
    PetscCall(VecDuplicateVecs(x, 2, &ctx->v[1]));
    PetscCall(VecDestroy(&x));
  }
  /* dense work matrices for PCMatApply_HPDDMShell() are (re)created lazily */
  std::fill_n(ctx->V, 3, nullptr);
  PetscFunctionReturn(0);
}
523 
/* Vec overload (selected via enable_if): applies the coarse correction y = Q x of the level,
   scattering between the PETSc and HPDDM numberings around the HPDDM deflation kernel */
template <class Type, typename std::enable_if<std::is_same<Type, Vec>::value>::type * = nullptr>
static inline PetscErrorCode PCHPDDMDeflate_Private(PC pc, Type x, Type y)
{
  PC_HPDDM_Level *ctx;
  PetscScalar    *out;

  PetscFunctionBegin;
  PetscCall(PCShellGetContext(pc, &ctx));
  /* going from PETSc to HPDDM numbering */
  PetscCall(VecScatterBegin(ctx->scatter, x, ctx->v[0][0], INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(ctx->scatter, x, ctx->v[0][0], INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecGetArrayWrite(ctx->v[0][0], &out));
  ctx->P->deflation<false>(NULL, out, 1); /* y = Q x */
  PetscCall(VecRestoreArrayWrite(ctx->v[0][0], &out));
  /* going from HPDDM to PETSc numbering */
  PetscCall(VecScatterBegin(ctx->scatter, ctx->v[0][0], y, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(ctx->scatter, ctx->v[0][0], y, INSERT_VALUES, SCATTER_REVERSE));
  PetscFunctionReturn(0);
}
543 
/* Mat overload (selected via enable_if): same coarse correction Y = Q X, applied column by column
   on dense blocks of vectors, with ctx->V[0] as the work matrix in the HPDDM numbering */
template <class Type, typename std::enable_if<std::is_same<Type, Mat>::value>::type * = nullptr>
static inline PetscErrorCode PCHPDDMDeflate_Private(PC pc, Type X, Type Y)
{
  PC_HPDDM_Level *ctx;
  Vec             vX, vY, vC;
  PetscScalar    *out;
  PetscInt        i, N;

  PetscFunctionBegin;
  PetscCall(PCShellGetContext(pc, &ctx));
  PetscCall(MatGetSize(X, NULL, &N));
  /* going from PETSc to HPDDM numbering */
  for (i = 0; i < N; ++i) {
    PetscCall(MatDenseGetColumnVecRead(X, i, &vX));
    PetscCall(MatDenseGetColumnVecWrite(ctx->V[0], i, &vC));
    PetscCall(VecScatterBegin(ctx->scatter, vX, vC, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ctx->scatter, vX, vC, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(MatDenseRestoreColumnVecWrite(ctx->V[0], i, &vC));
    PetscCall(MatDenseRestoreColumnVecRead(X, i, &vX));
  }
  /* all N columns are deflated in a single HPDDM call */
  PetscCall(MatDenseGetArrayWrite(ctx->V[0], &out));
  ctx->P->deflation<false>(NULL, out, N); /* Y = Q X */
  PetscCall(MatDenseRestoreArrayWrite(ctx->V[0], &out));
  /* going from HPDDM to PETSc numbering */
  for (i = 0; i < N; ++i) {
    PetscCall(MatDenseGetColumnVecRead(ctx->V[0], i, &vC));
    PetscCall(MatDenseGetColumnVecWrite(Y, i, &vY));
    PetscCall(VecScatterBegin(ctx->scatter, vC, vY, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(ctx->scatter, vC, vY, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(MatDenseRestoreColumnVecWrite(Y, i, &vY));
    PetscCall(MatDenseRestoreColumnVecRead(ctx->V[0], i, &vC));
  }
  PetscFunctionReturn(0);
}
578 
/*
     PCApply_HPDDMShell - Applies a (2) deflated, (1) additive, or (3) balanced coarse correction. In what follows, E = Z Pmat Z^T and Q = Z^T E^-1 Z.

.vb
   (1) y =                Pmat^-1              x + Q x,
   (2) y =                Pmat^-1 (I - Amat Q) x + Q x (default),
   (3) y = (I - Q Amat^T) Pmat^-1 (I - Amat Q) x + Q x.
.ve

   Input Parameters:
+     pc - preconditioner context
-     x - input vector

   Output Parameter:
.     y - output vector

   Notes:
     The options of Pmat^1 = pc(Pmat) are prefixed by -pc_hpddm_levels_1_pc_. Z is a tall-and-skinny matrix assembled by HPDDM. The number of processes on which (Z Pmat Z^T) is aggregated is set via -pc_hpddm_coarse_p.
     The options of (Z Pmat Z^T)^-1 = ksp(Z Pmat Z^T) are prefixed by -pc_hpddm_coarse_ (`KSPPREONLY` and `PCCHOLESKY` by default), unless a multilevel correction is turned on, in which case, this function is called recursively at each level except the coarsest one.
     (1) and (2) visit the "next" level (in terms of coarsening) once per application, while (3) visits it twice, so it is asymptotically twice costlier. (2) is not symmetric even if both Amat and Pmat are symmetric.

   Level: advanced

   Developer Note:
   Since this is not an actual manual page the material below should be moved to an appropriate manual page with the appropriate context, i.e. explaining when it is used and how
   to trigger it. Likely the manual page is `PCHPDDM`

.seealso: `PCHPDDM`, `PCHPDDMCoarseCorrectionType`
*/
static PetscErrorCode PCApply_HPDDMShell(PC pc, Vec x, Vec y)
{
  PC_HPDDM_Level *ctx;
  Mat             A;

  PetscFunctionBegin;
  PetscCall(PCShellGetContext(pc, &ctx));
  PetscCheck(ctx->P, PETSC_COMM_SELF, PETSC_ERR_PLIB, "PCSHELL from PCHPDDM called with no HPDDM object");
  PetscCall(KSPGetOperators(ctx->ksp, &A, NULL));
  PetscCall(PCHPDDMDeflate_Private(pc, x, y)); /* y = Q x                          */
  if (ctx->parent->correction == PC_HPDDM_COARSE_CORRECTION_DEFLATED || ctx->parent->correction == PC_HPDDM_COARSE_CORRECTION_BALANCED) {
    /* the ctx->parent->normal branch handles the KSPLSQR normal equations on the finest level only,
       where the operator application is A^T A instead of A (cf. PCPreSolve_HPDDM()) */
    if (!ctx->parent->normal || ctx != ctx->parent->levels[0]) PetscCall(MatMult(A, y, ctx->v[1][0])); /* y = A Q x */
    else { /* KSPLSQR and finest level */ PetscCall(MatMult(A, y, ctx->parent->normal));               /* y = A Q x                        */
      PetscCall(MatMultHermitianTranspose(A, ctx->parent->normal, ctx->v[1][0]));                      /* y = A^T A Q x    */
    }
    PetscCall(VecWAXPY(ctx->v[1][1], -1.0, ctx->v[1][0], x)); /* y = (I - A Q) x                  */
    PetscCall(PCApply(ctx->pc, ctx->v[1][1], ctx->v[1][0]));  /* y = M^-1 (I - A Q) x             */
    if (ctx->parent->correction == PC_HPDDM_COARSE_CORRECTION_BALANCED) {
      /* the balanced correction applies (I - Q A^T) on the left as well, visiting the coarse level twice */
      if (!ctx->parent->normal || ctx != ctx->parent->levels[0]) PetscCall(MatMultHermitianTranspose(A, ctx->v[1][0], ctx->v[1][1])); /* z = A^T y */
      else {
        PetscCall(MatMult(A, ctx->v[1][0], ctx->parent->normal));
        PetscCall(MatMultHermitianTranspose(A, ctx->parent->normal, ctx->v[1][1])); /* z = A^T A y    */
      }
      PetscCall(PCHPDDMDeflate_Private(pc, ctx->v[1][1], ctx->v[1][1]));
      PetscCall(VecAXPBYPCZ(y, -1.0, 1.0, 1.0, ctx->v[1][1], ctx->v[1][0])); /* y = (I - Q A^T) y + Q x */
    } else PetscCall(VecAXPY(y, 1.0, ctx->v[1][0]));                         /* y = Q M^-1 (I - A Q) x + Q x     */
  } else {
    PetscCheck(ctx->parent->correction == PC_HPDDM_COARSE_CORRECTION_ADDITIVE, PetscObjectComm((PetscObject)pc), PETSC_ERR_PLIB, "PCSHELL from PCHPDDM called with an unknown PCHPDDMCoarseCorrectionType %d", ctx->parent->correction);
    PetscCall(PCApply(ctx->pc, x, ctx->v[1][0]));
    PetscCall(VecAXPY(y, 1.0, ctx->v[1][0])); /* y = M^-1 x + Q x                 */
  }
  PetscFunctionReturn(0);
}
641 
642 /*
643      PCMatApply_HPDDMShell - Variant of PCApply_HPDDMShell() for blocks of vectors.
644 
645    Input Parameters:
646 +     pc - preconditioner context
647 -     X - block of input vectors
648 
649    Output Parameter:
650 .     Y - block of output vectors
651 
652    Level: advanced
653 
654 .seealso: `PCHPDDM`, `PCApply_HPDDMShell()`, `PCHPDDMCoarseCorrectionType`
655 */
static PetscErrorCode PCMatApply_HPDDMShell(PC pc, Mat X, Mat Y)
{
  PC_HPDDM_Level *ctx;
  Mat             A, *ptr;
  PetscContainer  container = NULL;
  PetscScalar    *array;
  PetscInt        m, M, N, prev = 0;
  PetscBool       reset = PETSC_FALSE;

  PetscFunctionBegin;
  PetscCall(PCShellGetContext(pc, &ctx));
  PetscCheck(ctx->P, PETSC_COMM_SELF, PETSC_ERR_PLIB, "PCSHELL from PCHPDDM called with no HPDDM object");
  PetscCall(MatGetSize(X, NULL, &N)); /* N = number of right-hand sides in the block */
  PetscCall(KSPGetOperators(ctx->ksp, &A, NULL));
  /* the container caches the two work Mat of the MatProduct so they can be reused across calls (and shared with KSPHPDDM) */
  PetscCall(PetscObjectQuery((PetscObject)A, "_HPDDM_MatProduct", (PetscObject *)&container));
  if (container) { /* MatProduct container already attached */
    PetscCall(PetscContainerGetPointer(container, (void **)&ptr));
    if (ptr[1] != ctx->V[2]) /* Mat has changed or may have been set first in KSPHPDDM */
      for (m = 0; m < 2; ++m) {
        /* adopt the Mat stored in the container in place of the locally cached ones */
        PetscCall(MatDestroy(ctx->V + m + 1));
        ctx->V[m + 1] = ptr[m];
        PetscCall(PetscObjectReference((PetscObject)ctx->V[m + 1]));
      }
  }
  if (ctx->V[1]) PetscCall(MatGetSize(ctx->V[1], NULL, &prev)); /* number of columns used at the previous call */
  if (N != prev || !ctx->V[0]) {
    /* block width changed (or first call): (re)allocate the three dense work Mat */
    PetscCall(MatDestroy(ctx->V));
    PetscCall(VecGetLocalSize(ctx->v[0][0], &m));
    PetscCall(MatCreateDense(PetscObjectComm((PetscObject)pc), m, PETSC_DECIDE, PETSC_DECIDE, N, NULL, ctx->V));
    if (N != prev) {
      PetscCall(MatDestroy(ctx->V + 1));
      PetscCall(MatDestroy(ctx->V + 2));
      PetscCall(MatGetLocalSize(X, &m, NULL));
      PetscCall(MatGetSize(X, &M, NULL));
      /* except in the BALANCED case, V[1] can alias the memory of V[0] since they are not needed simultaneously */
      if (ctx->parent->correction != PC_HPDDM_COARSE_CORRECTION_BALANCED) PetscCall(MatDenseGetArrayWrite(ctx->V[0], &array));
      else array = NULL;
      PetscCall(MatCreateDense(PetscObjectComm((PetscObject)pc), m, PETSC_DECIDE, M, N, array, ctx->V + 1));
      if (ctx->parent->correction != PC_HPDDM_COARSE_CORRECTION_BALANCED) PetscCall(MatDenseRestoreArrayWrite(ctx->V[0], &array));
      PetscCall(MatCreateDense(PetscObjectComm((PetscObject)pc), m, PETSC_DECIDE, M, N, NULL, ctx->V + 2));
      /* V[1] = A Y, set up symbolically once so only the numeric phase runs per call */
      PetscCall(MatProductCreateWithMat(A, Y, NULL, ctx->V[1]));
      PetscCall(MatProductSetType(ctx->V[1], MATPRODUCT_AB));
      PetscCall(MatProductSetFromOptions(ctx->V[1]));
      PetscCall(MatProductSymbolic(ctx->V[1]));
      if (!container) { /* no MatProduct container attached, create one to be queried in KSPHPDDM or at the next call to PCMatApply() */
        PetscCall(PetscContainerCreate(PetscObjectComm((PetscObject)A), &container));
        PetscCall(PetscObjectCompose((PetscObject)A, "_HPDDM_MatProduct", (PetscObject)container));
      }
      PetscCall(PetscContainerSetPointer(container, ctx->V + 1)); /* need to compose B and D from MatProductCreateWithMat(A, B, NULL, D), which are stored in the contiguous array ctx->V */
    }
    if (ctx->parent->correction == PC_HPDDM_COARSE_CORRECTION_BALANCED) {
      /* V[2] = A^T V[1], needed for the extra projection of the BALANCED correction */
      PetscCall(MatProductCreateWithMat(A, ctx->V[1], NULL, ctx->V[2]));
      PetscCall(MatProductSetType(ctx->V[2], MATPRODUCT_AtB));
      PetscCall(MatProductSetFromOptions(ctx->V[2]));
      PetscCall(MatProductSymbolic(ctx->V[2]));
    }
    ctx->P->start(N); /* tell HPDDM how many right-hand sides to expect */
  }
  if (N == prev || container) { /* when MatProduct container is attached, always need to MatProductReplaceMats() since KSPHPDDM may have replaced the Mat as well */
    PetscCall(MatProductReplaceMats(NULL, Y, NULL, ctx->V[1]));
    if (container && ctx->parent->correction != PC_HPDDM_COARSE_CORRECTION_BALANCED) {
      /* restore the aliasing of V[1] onto the storage of V[0] */
      PetscCall(MatDenseGetArrayWrite(ctx->V[0], &array));
      PetscCall(MatDensePlaceArray(ctx->V[1], array));
      PetscCall(MatDenseRestoreArrayWrite(ctx->V[0], &array));
      reset = PETSC_TRUE; /* remember to MatDenseResetArray() before returning */
    }
  }
  PetscCall(PCHPDDMDeflate_Private(pc, X, Y)); /* Y = Q X (coarse correction), block analogue of PCApply_HPDDMShell() */
  if (ctx->parent->correction == PC_HPDDM_COARSE_CORRECTION_DEFLATED || ctx->parent->correction == PC_HPDDM_COARSE_CORRECTION_BALANCED) {
    PetscCall(MatProductNumeric(ctx->V[1]));                                /* V[1] = A Q X             */
    PetscCall(MatCopy(ctx->V[1], ctx->V[2], SAME_NONZERO_PATTERN));
    PetscCall(MatAXPY(ctx->V[2], -1.0, X, SAME_NONZERO_PATTERN));           /* V[2] = (A Q - I) X       */
    PetscCall(PCMatApply(ctx->pc, ctx->V[2], ctx->V[1]));                   /* V[1] = M^-1 (A Q - I) X  */
    if (ctx->parent->correction == PC_HPDDM_COARSE_CORRECTION_BALANCED) {
      PetscCall(MatProductNumeric(ctx->V[2]));                              /* V[2] = A^T V[1]          */
      PetscCall(PCHPDDMDeflate_Private(pc, ctx->V[2], ctx->V[2]));          /* V[2] = Q A^T V[1]        */
      PetscCall(MatAXPY(ctx->V[1], -1.0, ctx->V[2], SAME_NONZERO_PATTERN)); /* V[1] = (I - Q A^T) V[1]  */
    }
    PetscCall(MatAXPY(Y, -1.0, ctx->V[1], SAME_NONZERO_PATTERN));           /* Y = Q X - V[1]           */
  } else {
    PetscCheck(ctx->parent->correction == PC_HPDDM_COARSE_CORRECTION_ADDITIVE, PetscObjectComm((PetscObject)pc), PETSC_ERR_PLIB, "PCSHELL from PCHPDDM called with an unknown PCHPDDMCoarseCorrectionType %d", ctx->parent->correction);
    PetscCall(PCMatApply(ctx->pc, X, ctx->V[1]));
    PetscCall(MatAXPY(Y, 1.0, ctx->V[1], SAME_NONZERO_PATTERN)); /* Y = M^-1 X + Q X */
  }
  if (reset) PetscCall(MatDenseResetArray(ctx->V[1])); /* undo the MatDensePlaceArray() from above */
  PetscFunctionReturn(0);
}
742 
743 static PetscErrorCode PCDestroy_HPDDMShell(PC pc)
744 {
745   PC_HPDDM_Level *ctx;
746   PetscContainer  container;
747 
748   PetscFunctionBegin;
749   PetscCall(PCShellGetContext(pc, &ctx));
750   PetscCall(HPDDM::Schwarz<PetscScalar>::destroy(ctx, PETSC_TRUE));
751   PetscCall(VecDestroyVecs(1, &ctx->v[0]));
752   PetscCall(VecDestroyVecs(2, &ctx->v[1]));
753   PetscCall(PetscObjectQuery((PetscObject)(ctx->pc)->mat, "_HPDDM_MatProduct", (PetscObject *)&container));
754   PetscCall(PetscContainerDestroy(&container));
755   PetscCall(PetscObjectCompose((PetscObject)(ctx->pc)->mat, "_HPDDM_MatProduct", NULL));
756   PetscCall(MatDestroy(ctx->V));
757   PetscCall(MatDestroy(ctx->V + 1));
758   PetscCall(MatDestroy(ctx->V + 2));
759   PetscCall(VecDestroy(&ctx->D));
760   PetscCall(VecScatterDestroy(&ctx->scatter));
761   PetscCall(PCDestroy(&ctx->pc));
762   PetscFunctionReturn(0);
763 }
764 
/* PCHPDDMSolve_Private - Solves with the KSP of a given level on mu right-hand sides stored
   contiguously in rhs; the solution overwrites rhs in place. Called from within HPDDM. */
static PetscErrorCode PCHPDDMSolve_Private(const PC_HPDDM_Level *ctx, PetscScalar *rhs, const unsigned short &mu)
{
  Mat      B, X;
  PetscInt n, N, j = 0;

  PetscFunctionBegin;
  /* local and global sizes of the level operator give the layout of rhs */
  PetscCall(KSPGetOperators(ctx->ksp, &B, NULL));
  PetscCall(MatGetLocalSize(B, &n, NULL));
  PetscCall(MatGetSize(B, &N, NULL));
  if (ctx->parent->log_separate) {
    /* j = index of this level in the hierarchy, to pick the matching log event */
    j = std::distance(ctx->parent->levels, std::find(ctx->parent->levels, ctx->parent->levels + ctx->parent->N, ctx));
    PetscCall(PetscLogEventBegin(PC_HPDDM_Solve[j], ctx->ksp, 0, 0, 0));
  }
  if (mu == 1) { /* single right-hand side: wrap rhs in a Vec and use KSPSolve() */
    if (!ctx->ksp->vec_rhs) {
      /* lazily create the work Vec; vec_rhs has no backing array, rhs is placed below */
      PetscCall(VecCreateMPIWithArray(PetscObjectComm((PetscObject)ctx->ksp), 1, n, N, NULL, &ctx->ksp->vec_rhs));
      PetscCall(VecCreateMPI(PetscObjectComm((PetscObject)ctx->ksp), n, N, &ctx->ksp->vec_sol));
    }
    PetscCall(VecPlaceArray(ctx->ksp->vec_rhs, rhs));
    PetscCall(KSPSolve(ctx->ksp, NULL, NULL));
    /* copy the solution back into rhs (in-place contract), then detach the user array */
    PetscCall(VecCopy(ctx->ksp->vec_sol, ctx->ksp->vec_rhs));
    PetscCall(VecResetArray(ctx->ksp->vec_rhs));
  } else { /* multiple right-hand sides: wrap rhs in a dense Mat and use KSPMatSolve() */
    PetscCall(MatCreateDense(PetscObjectComm((PetscObject)ctx->ksp), n, PETSC_DECIDE, N, mu, rhs, &B));
    PetscCall(MatCreateDense(PetscObjectComm((PetscObject)ctx->ksp), n, PETSC_DECIDE, N, mu, NULL, &X));
    PetscCall(KSPMatSolve(ctx->ksp, B, X));
    /* copy the block solution back into rhs (B shares the rhs storage) */
    PetscCall(MatCopy(X, B, SAME_NONZERO_PATTERN));
    PetscCall(MatDestroy(&X));
    PetscCall(MatDestroy(&B));
  }
  if (ctx->parent->log_separate) PetscCall(PetscLogEventEnd(PC_HPDDM_Solve[j], ctx->ksp, 0, 0, 0));
  PetscFunctionReturn(0);
}
798 
799 static PetscErrorCode PCHPDDMSetUpNeumannOverlap_Private(PC pc)
800 {
801   PC_HPDDM *data = (PC_HPDDM *)pc->data;
802 
803   PetscFunctionBegin;
804   if (data->setup) {
805     Mat       P;
806     Vec       x, xt = NULL;
807     PetscReal t = 0.0, s = 0.0;
808 
809     PetscCall(PCGetOperators(pc, NULL, &P));
810     PetscCall(PetscObjectQuery((PetscObject)P, "__SNES_latest_X", (PetscObject *)&x));
811     PetscCallBack("PCHPDDM Neumann callback", (*data->setup)(data->aux, t, x, xt, s, data->is, data->setup_ctx));
812   }
813   PetscFunctionReturn(0);
814 }
815 
816 static PetscErrorCode PCHPDDMCreateSubMatrices_Private(Mat mat, PetscInt n, const IS *, const IS *, MatReuse scall, Mat *submat[])
817 {
818   Mat A;
819 
820   PetscFunctionBegin;
821   PetscCheck(n == 1, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MatCreateSubMatrices() called to extract %" PetscInt_FMT " submatrices, which is different than 1", n);
822   /* previously composed Mat */
823   PetscCall(PetscObjectQuery((PetscObject)mat, "_PCHPDDM_SubMatrices", (PetscObject *)&A));
824   PetscCheck(A, PETSC_COMM_SELF, PETSC_ERR_PLIB, "SubMatrices not found in Mat");
825   if (scall == MAT_INITIAL_MATRIX) {
826     PetscCall(PetscCalloc1(1, submat));
827     PetscCall(MatDuplicate(A, MAT_COPY_VALUES, *submat));
828   } else PetscCall(MatCopy(A, (*submat)[0], SAME_NONZERO_PATTERN));
829   PetscFunctionReturn(0);
830 }
831 
/* PCHPDDMCommunicationAvoidingPCASM_Private - Sets up a PCASM without any parallel submatrix
   extraction: the subdomain Mat C is composed on the Pmat and MATOP_CREATE_SUBMATRICES is
   temporarily overridden so that PCASM picks up C directly instead of communicating. */
static PetscErrorCode PCHPDDMCommunicationAvoidingPCASM_Private(PC pc, Mat C, PetscBool sorted)
{
  void (*op)(void); /* original MatCreateSubMatrices() implementation, restored below */

  PetscFunctionBegin;
  /* previously-composed Mat */
  PetscCall(PetscObjectCompose((PetscObject)pc->pmat, "_PCHPDDM_SubMatrices", (PetscObject)C));
  PetscCall(MatGetOperation(pc->pmat, MATOP_CREATE_SUBMATRICES, &op));
  /* trick suggested by Barry https://lists.mcs.anl.gov/pipermail/petsc-dev/2020-January/025491.html */
  PetscCall(MatSetOperation(pc->pmat, MATOP_CREATE_SUBMATRICES, (void (*)(void))PCHPDDMCreateSubMatrices_Private));
  if (sorted) PetscCall(PCASMSetSortIndices(pc, PETSC_FALSE)); /* everything is already sorted */
  PetscCall(PCSetFromOptions(pc));                             /* otherwise -pc_hpddm_levels_1_pc_asm_sub_mat_type is not used */
  PetscCall(PCSetUp(pc));
  /* reset MatCreateSubMatrices() */
  PetscCall(MatSetOperation(pc->pmat, MATOP_CREATE_SUBMATRICES, op));
  PetscCall(PetscObjectCompose((PetscObject)pc->pmat, "_PCHPDDM_SubMatrices", NULL));
  PetscFunctionReturn(0);
}
850 
851 static PetscErrorCode PCHPDDMPermute_Private(IS is, IS in_is, IS *out_is, Mat in_C, Mat *out_C, IS *p)
852 {
853   IS                           perm;
854   const PetscInt              *ptr;
855   PetscInt                    *concatenate, size, n;
856   std::map<PetscInt, PetscInt> order;
857   PetscBool                    sorted;
858 
859   PetscFunctionBegin;
860   PetscCall(ISSorted(is, &sorted));
861   if (!sorted) {
862     PetscCall(ISGetLocalSize(is, &size));
863     PetscCall(ISGetIndices(is, &ptr));
864     /* MatCreateSubMatrices(), called by PCASM, follows the global numbering of Pmat */
865     for (n = 0; n < size; ++n) order.insert(std::make_pair(ptr[n], n));
866     PetscCall(ISRestoreIndices(is, &ptr));
867     if (out_C) {
868       PetscCall(PetscMalloc1(size, &concatenate));
869       for (const std::pair<const PetscInt, PetscInt> &i : order) *concatenate++ = i.second;
870       concatenate -= size;
871       PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)in_C), size, concatenate, PETSC_OWN_POINTER, &perm));
872       PetscCall(ISSetPermutation(perm));
873       /* permute user-provided Mat so that it matches with MatCreateSubMatrices() numbering */
874       PetscCall(MatPermute(in_C, perm, perm, out_C));
875       if (p) *p = perm;
876       else PetscCall(ISDestroy(&perm)); /* no need to save the permutation */
877     }
878     if (out_is) {
879       PetscCall(PetscMalloc1(size, &concatenate));
880       for (const std::pair<const PetscInt, PetscInt> &i : order) *concatenate++ = i.first;
881       concatenate -= size;
882       /* permute user-provided IS so that it matches with MatCreateSubMatrices() numbering */
883       PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)in_is), size, concatenate, PETSC_OWN_POINTER, out_is));
884     }
885   } else { /* input IS is sorted, nothing to permute, simply duplicate inputs when needed */
886     if (out_C) PetscCall(MatDuplicate(in_C, MAT_COPY_VALUES, out_C));
887     if (out_is) PetscCall(ISDuplicate(in_is, out_is));
888   }
889   PetscFunctionReturn(0);
890 }
891 
892 static PetscErrorCode PCHPDDMDestroySubMatrices_Private(PetscBool flg, PetscBool algebraic, Mat *sub)
893 {
894   IS is;
895 
896   PetscFunctionBegin;
897   if (!flg) {
898     if (algebraic) {
899       PetscCall(PetscObjectQuery((PetscObject)sub[0], "_PCHPDDM_Embed", (PetscObject *)&is));
900       PetscCall(ISDestroy(&is));
901       PetscCall(PetscObjectCompose((PetscObject)sub[0], "_PCHPDDM_Embed", NULL));
902       PetscCall(PetscObjectCompose((PetscObject)sub[0], "_PCHPDDM_Compact", NULL));
903     }
904     PetscCall(MatDestroySubMatrices(algebraic ? 2 : 1, &sub));
905   }
906   PetscFunctionReturn(0);
907 }
908 
/* PCHPDDMAlgebraicAuxiliaryMat_Private - Builds the local auxiliary matrices needed for a
   fully-algebraic setup: extracts the overlapping rows of P, then either (block == PETSC_FALSE)
   compacts them by dropping zero columns (eq. (3.1) and (3.4) of [2022b]), or (block == PETSC_TRUE)
   assembles the diagonal block plus off-diagonal row sums lumped onto the diagonal. */
static PetscErrorCode PCHPDDMAlgebraicAuxiliaryMat_Private(Mat P, IS *is, Mat *sub[], PetscBool block)
{
  IS         icol[3], irow[2];
  Mat       *M, Q;
  PetscReal *ptr;
  PetscInt  *idx, p = 0, n, bs = PetscAbs(P->cmap->bs);
  PetscBool  flg;

  PetscFunctionBegin;
  /* icol[2] = all columns of P, so M[0] below holds the full overlapping rows */
  PetscCall(ISCreateStride(PETSC_COMM_SELF, P->cmap->N, 0, 1, icol + 2));
  PetscCall(ISSetBlockSize(icol[2], bs));
  PetscCall(ISSetIdentity(icol[2]));
  PetscCall(PetscObjectTypeCompare((PetscObject)P, MATMPISBAIJ, &flg));
  if (flg) {
    /* MatCreateSubMatrices() does not handle MATMPISBAIJ properly when iscol != isrow, so convert first to MATMPIBAIJ */
    PetscCall(MatConvert(P, MATMPIBAIJ, MAT_INITIAL_MATRIX, &Q));
    std::swap(P, Q);
  }
  PetscCall(MatCreateSubMatrices(P, 1, is, icol + 2, MAT_INITIAL_MATRIX, &M));
  if (flg) {
    /* restore the original P and drop the temporary conversion */
    std::swap(P, Q);
    PetscCall(MatDestroy(&Q));
  }
  PetscCall(ISDestroy(icol + 2));
  /* irow[0] = all (local) rows of the extracted block M[0] */
  PetscCall(ISCreateStride(PETSC_COMM_SELF, M[0]->rmap->N, 0, 1, irow));
  PetscCall(ISSetBlockSize(irow[0], bs));
  PetscCall(ISSetIdentity(irow[0]));
  if (!block) {
    PetscCall(PetscMalloc2(P->cmap->N, &ptr, P->cmap->N, &idx));
    PetscCall(MatGetColumnNorms(M[0], NORM_INFINITY, ptr));
    /* check for nonzero columns so that M[0] may be expressed in compact form */
    for (n = 0; n < P->cmap->N; n += bs)
      if (std::find_if(ptr + n, ptr + n + bs, [](PetscReal v) { return v > PETSC_MACHINE_EPSILON; }) != ptr + n + bs) {
        /* keep the whole block of bs columns as soon as one of them is nonzero */
        std::iota(idx + p, idx + p + bs, n);
        p += bs;
      }
    PetscCall(ISCreateGeneral(PETSC_COMM_SELF, p, idx, PETSC_USE_POINTER, icol + 1));
    PetscCall(ISSetBlockSize(icol[1], bs));
    PetscCall(ISSetInfo(icol[1], IS_SORTED, IS_GLOBAL, PETSC_TRUE, PETSC_TRUE));
    PetscCall(ISEmbed(*is, icol[1], PETSC_FALSE, icol + 2));
    irow[1] = irow[0];
    /* first Mat will be used in PCASM (if it is used as a PC on this level) and as the left-hand side of GenEO */
    icol[0] = is[0];
    PetscCall(MatCreateSubMatrices(M[0], 2, irow, icol, MAT_INITIAL_MATRIX, sub));
    PetscCall(ISDestroy(icol + 1));
    PetscCall(PetscFree2(ptr, idx));
    /* IS used to go back and forth between the augmented and the original local linear system, see eq. (3.4) of [2022b] */
    PetscCall(PetscObjectCompose((PetscObject)(*sub)[0], "_PCHPDDM_Embed", (PetscObject)icol[2]));
    /* Mat used in eq. (3.1) of [2022b] */
    PetscCall(PetscObjectCompose((PetscObject)(*sub)[0], "_PCHPDDM_Compact", (PetscObject)(*sub)[1]));
  } else {
    Mat aux;
    PetscCall(MatSetOption(M[0], MAT_SUBMAT_SINGLEIS, PETSC_TRUE));
    /* diagonal block of the overlapping rows */
    PetscCall(MatCreateSubMatrices(M[0], 1, irow, is, MAT_INITIAL_MATRIX, sub));
    PetscCall(MatDuplicate((*sub)[0], MAT_COPY_VALUES, &aux));
    PetscCall(MatSetOption(aux, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE));
    if (bs == 1) { /* scalar case */
      Vec sum[2];
      PetscCall(MatCreateVecs(aux, sum, sum + 1));
      PetscCall(MatGetRowSum(M[0], sum[0]));
      PetscCall(MatGetRowSum(aux, sum[1]));
      /* off-diagonal block row sum (full rows - diagonal block rows) */
      PetscCall(VecAXPY(sum[0], -1.0, sum[1]));
      /* subdomain matrix plus off-diagonal block row sum */
      PetscCall(MatDiagonalSet(aux, sum[0], ADD_VALUES));
      PetscCall(VecDestroy(sum));
      PetscCall(VecDestroy(sum + 1));
    } else { /* vectorial case */
      /* TODO: missing MatGetValuesBlocked(), so the code below is     */
      /* an extension of the scalar case for when bs > 1, but it could */
      /* be more efficient by avoiding all these MatMatMult()          */
      Mat          sum[2], ones;
      PetscScalar *ptr;
      /* ones = tall-skinny Mat whose bs columns select one component each (block identity stacked) */
      PetscCall(PetscCalloc1(M[0]->cmap->n * bs, &ptr));
      PetscCall(MatCreateDense(PETSC_COMM_SELF, M[0]->cmap->n, bs, M[0]->cmap->n, bs, ptr, &ones));
      for (n = 0; n < M[0]->cmap->n; n += bs) {
        for (p = 0; p < bs; ++p) ptr[n + p * (M[0]->cmap->n + 1)] = 1.0;
      }
      PetscCall(MatMatMult(M[0], ones, MAT_INITIAL_MATRIX, PETSC_DEFAULT, sum));
      PetscCall(MatDestroy(&ones));
      /* reuse the same array for the second (smaller) selector, adjusting the leading dimension */
      PetscCall(MatCreateDense(PETSC_COMM_SELF, aux->cmap->n, bs, aux->cmap->n, bs, ptr, &ones));
      PetscCall(MatDenseSetLDA(ones, M[0]->cmap->n));
      PetscCall(MatMatMult(aux, ones, MAT_INITIAL_MATRIX, PETSC_DEFAULT, sum + 1));
      PetscCall(MatDestroy(&ones));
      PetscCall(PetscFree(ptr));
      /* off-diagonal block row sum (full rows - diagonal block rows) */
      PetscCall(MatAXPY(sum[0], -1.0, sum[1], SAME_NONZERO_PATTERN));
      PetscCall(MatDestroy(sum + 1));
      /* re-order values to be consistent with MatSetValuesBlocked()           */
      /* equivalent to MatTranspose() which does not truly handle              */
      /* MAT_INPLACE_MATRIX in the rectangular case, as it calls PetscMalloc() */
      PetscCall(MatDenseGetArrayWrite(sum[0], &ptr));
      HPDDM::Wrapper<PetscScalar>::imatcopy<'T'>(bs, sum[0]->rmap->n, ptr, sum[0]->rmap->n, bs);
      /* subdomain matrix plus off-diagonal block row sum */
      for (n = 0; n < aux->cmap->n / bs; ++n) PetscCall(MatSetValuesBlocked(aux, 1, &n, 1, &n, ptr + n * bs * bs, ADD_VALUES));
      PetscCall(MatAssemblyBegin(aux, MAT_FINAL_ASSEMBLY));
      PetscCall(MatAssemblyEnd(aux, MAT_FINAL_ASSEMBLY));
      PetscCall(MatDenseRestoreArrayWrite(sum[0], &ptr));
      PetscCall(MatDestroy(sum));
    }
    PetscCall(MatSetOption(aux, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_TRUE));
    /* left-hand side of GenEO, with the same sparsity pattern as PCASM subdomain solvers  */
    PetscCall(PetscObjectCompose((PetscObject)(*sub)[0], "_PCHPDDM_Neumann_Mat", (PetscObject)aux));
  }
  PetscCall(ISDestroy(irow));
  PetscCall(MatDestroySubMatrices(1, &M));
  PetscFunctionReturn(0);
}
1018 
1019 static PetscErrorCode PCSetUp_HPDDM(PC pc)
1020 {
1021   PC_HPDDM                 *data = (PC_HPDDM *)pc->data;
1022   PC                        inner;
1023   KSP                      *ksp;
1024   Mat                      *sub, A, P, N, C = NULL, uaux = NULL, weighted, subA[2];
1025   Vec                       xin, v;
1026   std::vector<Vec>          initial;
1027   IS                        is[1], loc, uis = data->is;
1028   ISLocalToGlobalMapping    l2g;
1029   char                      prefix[256];
1030   const char               *pcpre;
1031   const PetscScalar *const *ev;
1032   PetscInt                  n, requested = data->N, reused = 0;
1033   MatStructure              structure  = UNKNOWN_NONZERO_PATTERN;
1034   PetscBool                 subdomains = PETSC_FALSE, flg = PETSC_FALSE, ismatis, swap = PETSC_FALSE, algebraic = PETSC_FALSE, block = PETSC_FALSE;
1035   DM                        dm;
1036 
1037   PetscFunctionBegin;
1038   PetscCheck(data->levels && data->levels[0], PETSC_COMM_SELF, PETSC_ERR_PLIB, "Not a single level allocated");
1039   PetscCall(PCGetOptionsPrefix(pc, &pcpre));
1040   PetscCall(PCGetOperators(pc, &A, &P));
1041   if (!data->levels[0]->ksp) {
1042     PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &data->levels[0]->ksp));
1043     PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%spc_hpddm_%s_", pcpre ? pcpre : "", data->N > 1 ? "levels_1" : "coarse"));
1044     PetscCall(KSPSetOptionsPrefix(data->levels[0]->ksp, prefix));
1045     PetscCall(KSPSetType(data->levels[0]->ksp, KSPPREONLY));
1046   } else if (data->levels[0]->ksp->pc && data->levels[0]->ksp->pc->setupcalled == 1 && data->levels[0]->ksp->pc->reusepreconditioner) {
1047     /* if the fine-level PCSHELL exists, its setup has succeeded, and one wants to reuse it, */
1048     /* then just propagate the appropriate flag to the coarser levels                        */
1049     for (n = 0; n < PETSC_PCHPDDM_MAXLEVELS && data->levels[n]; ++n) {
1050       /* the following KSP and PC may be NULL for some processes, hence the check            */
1051       if (data->levels[n]->ksp) PetscCall(KSPSetReusePreconditioner(data->levels[n]->ksp, PETSC_TRUE));
1052       if (data->levels[n]->pc) PetscCall(PCSetReusePreconditioner(data->levels[n]->pc, PETSC_TRUE));
1053     }
1054     /* early bail out because there is nothing to do */
1055     PetscFunctionReturn(0);
1056   } else {
1057     /* reset coarser levels */
1058     for (n = 1; n < PETSC_PCHPDDM_MAXLEVELS && data->levels[n]; ++n) {
1059       if (data->levels[n]->ksp && data->levels[n]->ksp->pc && data->levels[n]->ksp->pc->setupcalled == 1 && data->levels[n]->ksp->pc->reusepreconditioner && n < data->N) {
1060         reused = data->N - n;
1061         break;
1062       }
1063       PetscCall(KSPDestroy(&data->levels[n]->ksp));
1064       PetscCall(PCDestroy(&data->levels[n]->pc));
1065     }
1066     /* check if some coarser levels are being reused */
1067     PetscCall(MPIU_Allreduce(MPI_IN_PLACE, &reused, 1, MPIU_INT, MPI_MAX, PetscObjectComm((PetscObject)pc)));
1068     const int *addr = data->levels[0]->P ? data->levels[0]->P->getAddrLocal() : &HPDDM::i__0;
1069 
1070     if (addr != &HPDDM::i__0 && reused != data->N - 1) {
1071       /* reuse previously computed eigenvectors */
1072       ev = data->levels[0]->P->getVectors();
1073       if (ev) {
1074         initial.reserve(*addr);
1075         PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, data->levels[0]->P->getDof(), ev[0], &xin));
1076         for (n = 0; n < *addr; ++n) {
1077           PetscCall(VecDuplicate(xin, &v));
1078           PetscCall(VecPlaceArray(xin, ev[n]));
1079           PetscCall(VecCopy(xin, v));
1080           initial.emplace_back(v);
1081           PetscCall(VecResetArray(xin));
1082         }
1083         PetscCall(VecDestroy(&xin));
1084       }
1085     }
1086   }
1087   data->N -= reused;
1088   PetscCall(KSPSetOperators(data->levels[0]->ksp, A, P));
1089 
1090   PetscCall(PetscObjectTypeCompare((PetscObject)P, MATIS, &ismatis));
1091   if (!data->is && !ismatis) {
1092     PetscErrorCode (*create)(DM, IS *, Mat *, PetscErrorCode(**)(Mat, PetscReal, Vec, Vec, PetscReal, IS, void *), void **) = NULL;
1093     PetscErrorCode (*usetup)(Mat, PetscReal, Vec, Vec, PetscReal, IS, void *)                                               = NULL;
1094     void *uctx                                                                                                              = NULL;
1095 
1096     /* first see if we can get the data from the DM */
1097     PetscCall(MatGetDM(P, &dm));
1098     if (!dm) PetscCall(MatGetDM(A, &dm));
1099     if (!dm) PetscCall(PCGetDM(pc, &dm));
1100     if (dm) { /* this is the hook for DMPLEX and DMDA for which the auxiliary Mat is the local Neumann matrix */
1101       PetscCall(PetscObjectQueryFunction((PetscObject)dm, "DMCreateNeumannOverlap_C", &create));
1102       if (create) {
1103         PetscCall((*create)(dm, &uis, &uaux, &usetup, &uctx));
1104         if (data->Neumann == PETSC_BOOL3_UNKNOWN) data->Neumann = PETSC_BOOL3_TRUE; /* set the value only if it was not already provided by the user */
1105       }
1106     }
1107     if (!create) {
1108       if (!uis) {
1109         PetscCall(PetscObjectQuery((PetscObject)pc, "_PCHPDDM_Neumann_IS", (PetscObject *)&uis));
1110         PetscCall(PetscObjectReference((PetscObject)uis));
1111       }
1112       if (!uaux) {
1113         PetscCall(PetscObjectQuery((PetscObject)pc, "_PCHPDDM_Neumann_Mat", (PetscObject *)&uaux));
1114         PetscCall(PetscObjectReference((PetscObject)uaux));
1115       }
1116       /* look inside the Pmat instead of the PC, needed for MatSchurComplementComputeExplicitOperator() */
1117       if (!uis) {
1118         PetscCall(PetscObjectQuery((PetscObject)P, "_PCHPDDM_Neumann_IS", (PetscObject *)&uis));
1119         PetscCall(PetscObjectReference((PetscObject)uis));
1120       }
1121       if (!uaux) {
1122         PetscCall(PetscObjectQuery((PetscObject)P, "_PCHPDDM_Neumann_Mat", (PetscObject *)&uaux));
1123         PetscCall(PetscObjectReference((PetscObject)uaux));
1124       }
1125     }
1126     PetscCall(PCHPDDMSetAuxiliaryMat(pc, uis, uaux, usetup, uctx));
1127     PetscCall(MatDestroy(&uaux));
1128     PetscCall(ISDestroy(&uis));
1129   }
1130 
1131   if (!ismatis) {
1132     PetscCall(PCHPDDMSetUpNeumannOverlap_Private(pc));
1133     PetscCall(PetscOptionsGetBool(NULL, pcpre, "-pc_hpddm_block_splitting", &block, NULL));
1134     if (data->is && block) {
1135       PetscCall(ISDestroy(&data->is));
1136       PetscCall(MatDestroy(&data->aux));
1137     }
1138     if (!data->is && data->N > 1) {
1139       char type[256] = {}; /* same size as in src/ksp/pc/interface/pcset.c */
1140       PetscCall(PetscObjectTypeCompareAny((PetscObject)P, &flg, MATNORMAL, MATNORMALHERMITIAN, ""));
1141       if (flg || (A->rmap->N != A->cmap->N && P->rmap->N == P->cmap->N && P->rmap->N == A->cmap->N)) {
1142         Mat B;
1143         PetscCall(PCHPDDMSetAuxiliaryMatNormal_Private(pc, A, P, &B, pcpre));
1144         if (data->correction == PC_HPDDM_COARSE_CORRECTION_DEFLATED) data->correction = PC_HPDDM_COARSE_CORRECTION_BALANCED;
1145         PetscCall(MatDestroy(&B));
1146       } else {
1147         PetscCall(PetscObjectTypeCompare((PetscObject)P, MATSCHURCOMPLEMENT, &flg));
1148         if (flg) {
1149           Mat                        A00, P00, A01, A10, A11, B, N;
1150           const PetscScalar         *array;
1151           PetscReal                  norm;
1152           MatSchurComplementAinvType type;
1153 
1154           PetscCall(MatSchurComplementGetSubMatrices(P, &A00, &P00, &A01, &A10, &A11));
1155           if (A11) {
1156             PetscCall(MatNorm(A11, NORM_INFINITY, &norm));
1157             PetscCheck(norm < PETSC_SMALL, PetscObjectComm((PetscObject)P), PETSC_ERR_SUP, "Nonzero A11 block");
1158           }
1159           if (PetscDefined(USE_DEBUG)) {
1160             Mat T, U = NULL;
1161             IS  z;
1162             PetscCall(PetscObjectTypeCompare((PetscObject)A10, MATTRANSPOSEVIRTUAL, &flg));
1163             if (flg) PetscCall(MatTransposeGetMat(A10, &U));
1164             else {
1165               PetscCall(PetscObjectTypeCompare((PetscObject)A10, MATHERMITIANTRANSPOSEVIRTUAL, &flg));
1166               if (flg) PetscCall(MatHermitianTransposeGetMat(A10, &U));
1167             }
1168             if (U) PetscCall(MatDuplicate(U, MAT_COPY_VALUES, &T));
1169             else PetscCall(MatHermitianTranspose(A10, MAT_INITIAL_MATRIX, &T));
1170             PetscCall(PetscLayoutCompare(T->rmap, A01->rmap, &flg));
1171             if (flg) {
1172               PetscCall(PetscLayoutCompare(T->cmap, A01->cmap, &flg));
1173               if (flg) {
1174                 PetscCall(MatFindZeroRows(A01, &z)); /* for essential boundary conditions, some implementations will */
1175                 if (z) {                             /*  zero rows in [P00 A01] except for the diagonal of P00       */
1176                   PetscCall(MatSetOption(T, MAT_NO_OFF_PROC_ZERO_ROWS, PETSC_TRUE));
1177                   PetscCall(MatZeroRowsIS(T, z, 0.0, NULL, NULL)); /* corresponding zero rows from A01 */
1178                   PetscCall(ISDestroy(&z));
1179                 }
1180                 PetscCall(MatMultEqual(A01, T, 10, &flg));
1181                 PetscCheck(flg, PetscObjectComm((PetscObject)P), PETSC_ERR_SUP, "A01 != A10^T");
1182               } else PetscCall(PetscInfo(pc, "A01 and A10^T have non-congruent column layouts, cannot test for equality\n"));
1183             }
1184             PetscCall(MatDestroy(&T));
1185           }
1186           PetscCall(MatCreateVecs(P00, &v, NULL));
1187           PetscCall(MatSchurComplementGetAinvType(P, &type));
1188           PetscCheck(type == MAT_SCHUR_COMPLEMENT_AINV_DIAG || type == MAT_SCHUR_COMPLEMENT_AINV_LUMP, PetscObjectComm((PetscObject)P), PETSC_ERR_SUP, "-%smat_schur_complement_ainv_type %s", ((PetscObject)P)->prefix ? ((PetscObject)P)->prefix : "", MatSchurComplementAinvTypes[type]);
1189           if (type == MAT_SCHUR_COMPLEMENT_AINV_LUMP) {
1190             PetscCall(MatGetRowSum(P00, v));
1191             if (A00 == P00) PetscCall(PetscObjectReference((PetscObject)A00));
1192             PetscCall(MatDestroy(&P00));
1193             PetscCall(VecGetArrayRead(v, &array));
1194             PetscCall(MatCreateAIJ(PetscObjectComm((PetscObject)A00), A00->rmap->n, A00->cmap->n, A00->rmap->N, A00->cmap->N, 1, NULL, 0, NULL, &P00));
1195             PetscCall(MatSetOption(P00, MAT_NO_OFF_PROC_ENTRIES, PETSC_TRUE));
1196             for (n = A00->rmap->rstart; n < A00->rmap->rend; ++n) PetscCall(MatSetValue(P00, n, n, array[n - A00->rmap->rstart], INSERT_VALUES));
1197             PetscCall(MatAssemblyBegin(P00, MAT_FINAL_ASSEMBLY));
1198             PetscCall(MatAssemblyEnd(P00, MAT_FINAL_ASSEMBLY));
1199             PetscCall(VecRestoreArrayRead(v, &array));
1200             PetscCall(MatSchurComplementUpdateSubMatrices(P, A00, P00, A01, A10, A11)); /* replace P00 by diag(sum of each row of P00) */
1201             PetscCall(MatDestroy(&P00));
1202           } else PetscCall(MatGetDiagonal(P00, v));
1203           PetscCall(VecReciprocal(v)); /* inv(diag(P00))       */
1204           PetscCall(VecSqrtAbs(v));    /* sqrt(inv(diag(P00))) */
1205           PetscCall(MatDuplicate(A01, MAT_COPY_VALUES, &B));
1206           PetscCall(MatDiagonalScale(B, v, NULL));
1207           PetscCall(VecDestroy(&v));
1208           PetscCall(MatCreateNormalHermitian(B, &N));
1209           PetscCall(PCHPDDMSetAuxiliaryMatNormal_Private(pc, B, N, &P, pcpre));
1210           PetscCall(PetscObjectTypeCompare((PetscObject)data->aux, MATSEQAIJ, &flg));
1211           if (!flg) {
1212             PetscCall(MatDestroy(&P));
1213             P = N;
1214             PetscCall(PetscObjectReference((PetscObject)P));
1215           } else PetscCall(MatScale(P, -1.0));
1216           PetscCall(MatScale(N, -1.0));
1217           PetscCall(PCSetOperators(pc, N, P)); /* replace P by -A01' inv(diag(P00)) A01 */
1218           PetscCall(MatDestroy(&N));
1219           PetscCall(MatDestroy(&P));
1220           PetscCall(MatDestroy(&B));
1221           PetscFunctionReturn(0);
1222         } else {
1223           PetscCall(PetscOptionsGetString(NULL, pcpre, "-pc_hpddm_levels_1_st_pc_type", type, sizeof(type), NULL));
1224           PetscCall(PetscStrcmp(type, PCMAT, &algebraic));
1225           PetscCall(PetscOptionsGetBool(NULL, pcpre, "-pc_hpddm_block_splitting", &block, NULL));
1226           PetscCheck(!algebraic || !block, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "-pc_hpddm_levels_1_st_pc_type mat and -pc_hpddm_block_splitting");
1227           if (block) algebraic = PETSC_TRUE;
1228           if (algebraic) {
1229             PetscCall(ISCreateStride(PETSC_COMM_SELF, P->rmap->n, P->rmap->rstart, 1, &data->is));
1230             PetscCall(MatIncreaseOverlap(P, 1, &data->is, 1));
1231             PetscCall(ISSort(data->is));
1232           } else PetscCall(PetscInfo(pc, "Cannot assemble a fully-algebraic coarse operator with an assembled Pmat and -%spc_hpddm_levels_1_st_pc_type != mat and -%spc_hpddm_block_splitting != true\n", pcpre ? pcpre : "", pcpre ? pcpre : ""));
1233         }
1234       }
1235     }
1236   }
1237 
1238   if (data->is || (ismatis && data->N > 1)) {
1239     if (ismatis) {
1240       std::initializer_list<std::string> list = {MATSEQBAIJ, MATSEQSBAIJ};
1241       PetscCall(MatISGetLocalMat(P, &N));
1242       std::initializer_list<std::string>::const_iterator it = std::find(list.begin(), list.end(), ((PetscObject)N)->type_name);
1243       PetscCall(MatISRestoreLocalMat(P, &N));
1244       switch (std::distance(list.begin(), it)) {
1245       case 0:
1246         PetscCall(MatConvert(P, MATMPIBAIJ, MAT_INITIAL_MATRIX, &C));
1247         break;
1248       case 1:
1249         /* MatCreateSubMatrices() does not work with MATSBAIJ and unsorted ISes, so convert to MPIBAIJ */
1250         PetscCall(MatConvert(P, MATMPIBAIJ, MAT_INITIAL_MATRIX, &C));
1251         PetscCall(MatSetOption(C, MAT_SYMMETRIC, PETSC_TRUE));
1252         break;
1253       default:
1254         PetscCall(MatConvert(P, MATMPIAIJ, MAT_INITIAL_MATRIX, &C));
1255       }
1256       PetscCall(MatISGetLocalToGlobalMapping(P, &l2g, NULL));
1257       PetscCall(PetscObjectReference((PetscObject)P));
1258       PetscCall(KSPSetOperators(data->levels[0]->ksp, A, C));
1259       std::swap(C, P);
1260       PetscCall(ISLocalToGlobalMappingGetSize(l2g, &n));
1261       PetscCall(ISCreateStride(PETSC_COMM_SELF, n, 0, 1, &loc));
1262       PetscCall(ISLocalToGlobalMappingApplyIS(l2g, loc, &is[0]));
1263       PetscCall(ISDestroy(&loc));
1264       /* the auxiliary Mat is _not_ the local Neumann matrix                                */
1265       /* it is the local Neumann matrix augmented (with zeros) through MatIncreaseOverlap() */
1266       data->Neumann = PETSC_BOOL3_FALSE;
1267       structure     = SAME_NONZERO_PATTERN;
1268       if (data->share) {
1269         data->share = PETSC_FALSE;
1270         PetscCall(PetscInfo(pc, "Cannot share subdomain KSP between SLEPc and PETSc with a Pmat of type MATIS\n"));
1271       }
1272     } else {
1273       is[0] = data->is;
1274       if (algebraic) subdomains = PETSC_TRUE;
1275       PetscCall(PetscOptionsGetBool(NULL, pcpre, "-pc_hpddm_define_subdomains", &subdomains, NULL));
1276       if (data->share) {
1277         if (!subdomains) {
1278           PetscCall(PetscInfo(pc, "Cannot share subdomain KSP between SLEPc and PETSc since -%spc_hpddm_define_subdomains is not true\n", pcpre ? pcpre : ""));
1279           data->share = PETSC_FALSE;
1280         }
1281         if (data->deflation) {
1282           PetscCall(PetscInfo(pc, "Nothing to share since PCHPDDMSetDeflationMat() has been called\n"));
1283           data->share = PETSC_FALSE;
1284         }
1285       }
1286       if (PetscBool3ToBool(data->Neumann)) {
1287         PetscCheck(!block, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "-pc_hpddm_block_splitting and -pc_hpddm_has_neumann");
1288         PetscCheck(!algebraic, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "-pc_hpddm_levels_1_st_pc_type mat and -pc_hpddm_has_neumann");
1289       }
1290       if (PetscBool3ToBool(data->Neumann) || block) structure = SAME_NONZERO_PATTERN;
1291       PetscCall(ISCreateStride(PetscObjectComm((PetscObject)data->is), P->rmap->n, P->rmap->rstart, 1, &loc));
1292     }
1293     PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%spc_hpddm_levels_1_", pcpre ? pcpre : ""));
1294     PetscCall(PetscOptionsGetEnum(NULL, prefix, "-st_matstructure", MatStructures, (PetscEnum *)&structure, &flg)); /* if not user-provided, force its value when possible */
1295     if (!flg && structure == SAME_NONZERO_PATTERN) {                                                                /* cannot call STSetMatStructure() yet, insert the appropriate option in the database, parsed by STSetFromOptions() */
1296       PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "-%spc_hpddm_levels_1_st_matstructure", pcpre ? pcpre : ""));
1297       PetscCall(PetscOptionsSetValue(NULL, prefix, MatStructures[structure]));
1298     }
1299     if (data->N > 1 && (data->aux || ismatis || algebraic)) {
1300       PetscCheck(loadedSym, PETSC_COMM_SELF, PETSC_ERR_PLIB, "HPDDM library not loaded, cannot use more than one level");
1301       PetscCall(MatSetOption(P, MAT_SUBMAT_SINGLEIS, PETSC_TRUE));
1302       if (ismatis) {
1303         /* needed by HPDDM (currently) so that the partition of unity is 0 on subdomain interfaces */
1304         PetscCall(MatIncreaseOverlap(P, 1, is, 1));
1305         PetscCall(ISDestroy(&data->is));
1306         data->is = is[0];
1307       } else {
1308         if (PetscDefined(USE_DEBUG)) {
1309           PetscBool equal;
1310           IS        intersect;
1311 
1312           PetscCall(ISIntersect(data->is, loc, &intersect));
1313           PetscCall(ISEqualUnsorted(loc, intersect, &equal));
1314           PetscCall(ISDestroy(&intersect));
1315           PetscCheck(equal, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "IS of the auxiliary Mat does not include all local rows of A");
1316         }
1317         PetscCall(PetscObjectComposeFunction((PetscObject)pc->pmat, "PCHPDDMAlgebraicAuxiliaryMat_Private_C", PCHPDDMAlgebraicAuxiliaryMat_Private));
1318         if (!PetscBool3ToBool(data->Neumann) && !algebraic) {
1319           PetscCall(PetscObjectTypeCompare((PetscObject)P, MATMPISBAIJ, &flg));
1320           if (flg) {
1321             /* maybe better to ISSort(is[0]), MatCreateSubMatrices(), and then MatPermute() */
1322             /* but there is no MatPermute_SeqSBAIJ(), so as before, just use MATMPIBAIJ     */
1323             PetscCall(MatConvert(P, MATMPIBAIJ, MAT_INITIAL_MATRIX, &uaux));
1324             flg = PETSC_FALSE;
1325           }
1326         }
1327       }
1328       if (algebraic) {
1329         PetscUseMethod(pc->pmat, "PCHPDDMAlgebraicAuxiliaryMat_Private_C", (Mat, IS *, Mat *[], PetscBool), (P, is, &sub, block));
1330         if (block) {
1331           PetscCall(PetscObjectQuery((PetscObject)sub[0], "_PCHPDDM_Neumann_Mat", (PetscObject *)&data->aux));
1332           PetscCall(PetscObjectCompose((PetscObject)sub[0], "_PCHPDDM_Neumann_Mat", NULL));
1333         }
1334       } else if (!uaux) {
1335         if (PetscBool3ToBool(data->Neumann)) sub = &data->aux;
1336         else PetscCall(MatCreateSubMatrices(P, 1, is, is, MAT_INITIAL_MATRIX, &sub));
1337       } else {
1338         PetscCall(MatCreateSubMatrices(uaux, 1, is, is, MAT_INITIAL_MATRIX, &sub));
1339         PetscCall(MatDestroy(&uaux));
1340         PetscCall(MatConvert(sub[0], MATSEQSBAIJ, MAT_INPLACE_MATRIX, sub));
1341       }
1342       /* Vec holding the partition of unity */
1343       if (!data->levels[0]->D) {
1344         PetscCall(ISGetLocalSize(data->is, &n));
1345         PetscCall(VecCreateMPI(PETSC_COMM_SELF, n, PETSC_DETERMINE, &data->levels[0]->D));
1346       }
1347       if (data->share && structure == SAME_NONZERO_PATTERN) { /* share the KSP only when the MatStructure is SAME_NONZERO_PATTERN */
1348         Mat      D;
1349         IS       perm = NULL;
1350         PetscInt size = -1;
1351         PetscCall(PCHPDDMPermute_Private(*is, data->is, &uis, PetscBool3ToBool(data->Neumann) || block ? sub[0] : data->aux, &C, &perm));
1352         if (!PetscBool3ToBool(data->Neumann) && !block) {
1353           PetscCall(MatPermute(sub[0], perm, perm, &D)); /* permute since PCASM will call ISSort() */
1354           PetscCall(MatHeaderReplace(sub[0], &D));
1355         }
1356         if (data->B) { /* see PCHPDDMSetRHSMat() */
1357           PetscCall(MatPermute(data->B, perm, perm, &D));
1358           PetscCall(MatHeaderReplace(data->B, &D));
1359         }
1360         PetscCall(ISDestroy(&perm));
1361         if (!data->levels[0]->pc) {
1362           PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%spc_hpddm_levels_1_", pcpre ? pcpre : ""));
1363           PetscCall(PCCreate(PetscObjectComm((PetscObject)pc), &data->levels[0]->pc));
1364           PetscCall(PCSetOptionsPrefix(data->levels[0]->pc, prefix));
1365           PetscCall(PCSetOperators(data->levels[0]->pc, A, P));
1366         }
1367         PetscCall(PCSetType(data->levels[0]->pc, PCASM));
1368         if (!data->levels[0]->pc->setupcalled) PetscCall(PCASMSetLocalSubdomains(data->levels[0]->pc, 1, is, &loc));
1369         PetscCall(PCSetFromOptions(data->levels[0]->pc));
1370         if (block) PetscCall(PCHPDDMCommunicationAvoidingPCASM_Private(data->levels[0]->pc, C, algebraic));
1371         else PetscCall(PCSetUp(data->levels[0]->pc));
1372         PetscTryMethod(data->levels[0]->pc, "PCASMGetSubKSP_C", (PC, PetscInt *, PetscInt *, KSP **), (data->levels[0]->pc, &size, NULL, &ksp));
1373         if (size != 1) {
1374           PetscCall(PCDestroy(&data->levels[0]->pc));
1375           PetscCall(MatDestroy(&C));
1376           PetscCall(ISDestroy(&uis));
1377           data->share = PETSC_FALSE;
1378           PetscCheck(size == -1, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Number of subdomain solver %" PetscInt_FMT " != 1", size);
1379           PetscCall(PetscInfo(pc, "Cannot share subdomain KSP between SLEPc and PETSc since PCASMGetSubKSP() not found in fine-level PC\n"));
1380         } else {
1381           const char *matpre;
1382           PetscBool   cmp[4];
1383           PetscCall(KSPGetOperators(ksp[0], subA, subA + 1));
1384           PetscCall(MatDuplicate(subA[1], MAT_SHARE_NONZERO_PATTERN, &D));
1385           PetscCall(MatGetOptionsPrefix(subA[1], &matpre));
1386           PetscCall(MatSetOptionsPrefix(D, matpre));
1387           PetscCall(PetscObjectTypeCompare((PetscObject)D, MATNORMAL, cmp));
1388           PetscCall(PetscObjectTypeCompare((PetscObject)C, MATNORMAL, cmp + 1));
1389           if (!cmp[0]) PetscCall(PetscObjectTypeCompare((PetscObject)D, MATNORMALHERMITIAN, cmp + 2));
1390           else cmp[2] = PETSC_FALSE;
1391           if (!cmp[1]) PetscCall(PetscObjectTypeCompare((PetscObject)C, MATNORMALHERMITIAN, cmp + 3));
1392           else cmp[3] = PETSC_FALSE;
1393           PetscCheck(cmp[0] == cmp[1] && cmp[2] == cmp[3], PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "-%spc_hpddm_levels_1_pc_asm_sub_mat_type %s and auxiliary Mat of type %s", pcpre ? pcpre : "", ((PetscObject)D)->type_name, ((PetscObject)C)->type_name);
1394           if (!cmp[0] && !cmp[2]) {
1395             if (!block) PetscCall(MatAXPY(D, 1.0, C, SUBSET_NONZERO_PATTERN));
1396             else {
1397               PetscCall(MatMissingDiagonal(D, cmp, NULL));
1398               if (cmp[0]) structure = DIFFERENT_NONZERO_PATTERN; /* data->aux has no missing diagonal entry */
1399               PetscCall(MatAXPY(D, 1.0, data->aux, structure));
1400             }
1401           } else {
1402             Mat mat[2];
1403             if (cmp[0]) {
1404               PetscCall(MatNormalGetMat(D, mat));
1405               PetscCall(MatNormalGetMat(C, mat + 1));
1406             } else {
1407               PetscCall(MatNormalHermitianGetMat(D, mat));
1408               PetscCall(MatNormalHermitianGetMat(C, mat + 1));
1409             }
1410             PetscCall(MatAXPY(mat[0], 1.0, mat[1], SUBSET_NONZERO_PATTERN));
1411           }
1412           PetscCall(MatPropagateSymmetryOptions(C, D));
1413           PetscCall(MatDestroy(&C));
1414           C = D;
1415           /* swap pointers so that variables stay consistent throughout PCSetUp() */
1416           std::swap(C, data->aux);
1417           std::swap(uis, data->is);
1418           swap = PETSC_TRUE;
1419         }
1420       } else if (data->share) {
1421         data->share = PETSC_FALSE;
1422         PetscCall(PetscInfo(pc, "Cannot share subdomain KSP between SLEPc and PETSc since -%spc_hpddm_levels_1_st_matstructure %s (!= %s)\n", pcpre ? pcpre : "", MatStructures[structure], MatStructures[SAME_NONZERO_PATTERN]));
1423       }
1424       if (!data->levels[0]->scatter) {
1425         PetscCall(MatCreateVecs(P, &xin, NULL));
1426         if (ismatis) PetscCall(MatDestroy(&P));
1427         PetscCall(VecScatterCreate(xin, data->is, data->levels[0]->D, NULL, &data->levels[0]->scatter));
1428         PetscCall(VecDestroy(&xin));
1429       }
1430       if (data->levels[0]->P) {
1431         /* if the pattern is the same and PCSetUp() has previously succeeded, reuse HPDDM buffers and connectivity */
1432         PetscCall(HPDDM::Schwarz<PetscScalar>::destroy(data->levels[0], pc->setupcalled < 1 || pc->flag == DIFFERENT_NONZERO_PATTERN ? PETSC_TRUE : PETSC_FALSE));
1433       }
1434       if (!data->levels[0]->P) data->levels[0]->P = new HPDDM::Schwarz<PetscScalar>();
1435       if (data->log_separate) PetscCall(PetscLogEventBegin(PC_HPDDM_SetUp[0], data->levels[0]->ksp, 0, 0, 0));
1436       else PetscCall(PetscLogEventBegin(PC_HPDDM_Strc, data->levels[0]->ksp, 0, 0, 0));
1437       /* HPDDM internal data structure */
1438       PetscCall(data->levels[0]->P->structure(loc, data->is, sub[0], ismatis ? C : data->aux, data->levels));
1439       if (!data->log_separate) PetscCall(PetscLogEventEnd(PC_HPDDM_Strc, data->levels[0]->ksp, 0, 0, 0));
1440       /* matrix pencil of the generalized eigenvalue problem on the overlap (GenEO) */
1441       if (data->deflation) weighted = data->aux;
1442       else if (!data->B) {
1443         PetscBool cmp[2];
1444         PetscCall(MatDuplicate(sub[0], MAT_COPY_VALUES, &weighted));
1445         PetscCall(PetscObjectTypeCompare((PetscObject)weighted, MATNORMAL, cmp));
1446         if (!cmp[0]) PetscCall(PetscObjectTypeCompare((PetscObject)weighted, MATNORMALHERMITIAN, cmp + 1));
1447         else cmp[1] = PETSC_FALSE;
1448         if (!cmp[0] && !cmp[1]) PetscCall(MatDiagonalScale(weighted, data->levels[0]->D, data->levels[0]->D));
1449         else { /* MATNORMAL applies MatDiagonalScale() in a matrix-free fashion, not what is needed since this won't be passed to SLEPc during the eigensolve */
1450           if (cmp[0]) PetscCall(MatNormalGetMat(weighted, &data->B));
1451           else PetscCall(MatNormalHermitianGetMat(weighted, &data->B));
1452           PetscCall(MatDiagonalScale(data->B, NULL, data->levels[0]->D));
1453           data->B = NULL;
1454           flg     = PETSC_FALSE;
1455         }
        /* neither MatDuplicate() nor MatDiagonalScale() handles the symmetry options, so propagate the options explicitly */
1457         /* only useful for -mat_type baij -pc_hpddm_levels_1_st_pc_type cholesky (no problem with MATAIJ or MATSBAIJ)       */
1458         PetscCall(MatPropagateSymmetryOptions(sub[0], weighted));
1459       } else weighted = data->B;
1460       /* SLEPc is used inside the loaded symbol */
1461       PetscCall((*loadedSym)(data->levels[0]->P, data->is, ismatis ? C : (algebraic && !block ? sub[0] : data->aux), weighted, data->B, initial, data->levels));
1462       if (data->share) {
1463         Mat st[2];
1464         PetscCall(KSPGetOperators(ksp[0], st, st + 1));
1465         PetscCall(MatCopy(subA[0], st[0], structure));
1466         if (subA[1] != subA[0] || st[1] != st[0]) PetscCall(MatCopy(subA[1], st[1], SAME_NONZERO_PATTERN));
1467       }
1468       if (data->log_separate) PetscCall(PetscLogEventEnd(PC_HPDDM_SetUp[0], data->levels[0]->ksp, 0, 0, 0));
1469       if (ismatis) PetscCall(MatISGetLocalMat(C, &N));
1470       else N = data->aux;
1471       P = sub[0];
1472       /* going through the grid hierarchy */
1473       for (n = 1; n < data->N; ++n) {
1474         if (data->log_separate) PetscCall(PetscLogEventBegin(PC_HPDDM_SetUp[n], data->levels[n]->ksp, 0, 0, 0));
1475         /* method composed in the loaded symbol since there, SLEPc is used as well */
1476         PetscTryMethod(data->levels[0]->ksp, "PCHPDDMSetUp_Private_C", (Mat *, Mat *, PetscInt, PetscInt *const, PC_HPDDM_Level **const), (&P, &N, n, &data->N, data->levels));
1477         if (data->log_separate) PetscCall(PetscLogEventEnd(PC_HPDDM_SetUp[n], data->levels[n]->ksp, 0, 0, 0));
1478       }
1479       /* reset to NULL to avoid any faulty use */
1480       PetscCall(PetscObjectComposeFunction((PetscObject)data->levels[0]->ksp, "PCHPDDMSetUp_Private_C", NULL));
1481       if (!ismatis) PetscCall(PetscObjectComposeFunction((PetscObject)pc->pmat, "PCHPDDMAlgebraicAuxiliaryMat_C", NULL));
1482       else PetscCall(PetscObjectDereference((PetscObject)C)); /* matching PetscObjectReference() above */
1483       for (n = 0; n < data->N - 1; ++n)
1484         if (data->levels[n]->P) {
1485           /* HPDDM internal work buffers */
1486           data->levels[n]->P->setBuffer();
1487           data->levels[n]->P->super::start();
1488         }
1489       if (ismatis || !subdomains) PetscCall(PCHPDDMDestroySubMatrices_Private(PetscBool3ToBool(data->Neumann), PetscBool(algebraic && !block), sub));
1490       if (ismatis) data->is = NULL;
1491       for (n = 0; n < data->N - 1 + (reused > 0); ++n) {
1492         if (data->levels[n]->P) {
1493           PC spc;
1494 
1495           /* force the PC to be PCSHELL to do the coarse grid corrections */
1496           PetscCall(KSPSetSkipPCSetFromOptions(data->levels[n]->ksp, PETSC_TRUE));
1497           PetscCall(KSPGetPC(data->levels[n]->ksp, &spc));
1498           PetscCall(PCSetType(spc, PCSHELL));
1499           PetscCall(PCShellSetContext(spc, data->levels[n]));
1500           PetscCall(PCShellSetSetUp(spc, PCSetUp_HPDDMShell));
1501           PetscCall(PCShellSetApply(spc, PCApply_HPDDMShell));
1502           PetscCall(PCShellSetMatApply(spc, PCMatApply_HPDDMShell));
1503           PetscCall(PCShellSetDestroy(spc, PCDestroy_HPDDMShell));
1504           if (!data->levels[n]->pc) PetscCall(PCCreate(PetscObjectComm((PetscObject)data->levels[n]->ksp), &data->levels[n]->pc));
1505           if (n < reused) {
1506             PetscCall(PCSetReusePreconditioner(spc, PETSC_TRUE));
1507             PetscCall(PCSetReusePreconditioner(data->levels[n]->pc, PETSC_TRUE));
1508           }
1509           PetscCall(PCSetUp(spc));
1510         }
1511       }
1512       PetscCall(PetscObjectComposeFunction((PetscObject)pc->pmat, "PCHPDDMAlgebraicAuxiliaryMat_Private_C", NULL));
1513     } else flg = reused ? PETSC_FALSE : PETSC_TRUE;
1514     if (!ismatis && subdomains) {
1515       if (flg) PetscCall(KSPGetPC(data->levels[0]->ksp, &inner));
1516       else inner = data->levels[0]->pc;
1517       if (inner) {
1518         PetscCall(PCSetType(inner, PCASM)); /* inner is the fine-level PC for which one must ensure                       */
1519                                             /* PCASMSetLocalSubdomains() has been called when -pc_hpddm_define_subdomains */
1520         if (!inner->setupcalled) {          /* evaluates to PETSC_FALSE when -pc_hpddm_block_splitting */
1521           PetscCall(PCASMSetLocalSubdomains(inner, 1, is, &loc));
1522           if (!PetscBool3ToBool(data->Neumann) && data->N > 1) { /* subdomain matrices are already created for the eigenproblem, reuse them for the fine-level PC */
1523             PetscCall(PCHPDDMPermute_Private(*is, NULL, NULL, sub[0], &C, NULL));
1524             PetscCall(PCHPDDMCommunicationAvoidingPCASM_Private(inner, C, algebraic));
1525             PetscCall(MatDestroy(&C));
1526           }
1527         }
1528       }
1529       if (data->N > 1) PetscCall(PCHPDDMDestroySubMatrices_Private(PetscBool3ToBool(data->Neumann), PetscBool(algebraic && !block), sub));
1530     }
1531     PetscCall(ISDestroy(&loc));
1532   } else data->N = 1 + reused; /* enforce this value to 1 + reused if there is no way to build another level */
1533   if (requested != data->N + reused) {
1534     PetscCall(PetscInfo(pc, "%" PetscInt_FMT " levels requested, only %" PetscInt_FMT " built + %" PetscInt_FMT " reused. Options for level(s) > %" PetscInt_FMT ", including -%spc_hpddm_coarse_ will not be taken into account\n", requested, data->N, reused,
1535                         data->N, pcpre ? pcpre : ""));
1536     PetscCall(PetscInfo(pc, "It is best to tune parameters, e.g., a higher value for -%spc_hpddm_levels_%" PetscInt_FMT "_eps_threshold so that at least one local deflation vector will be selected\n", pcpre ? pcpre : "", data->N));
1537     /* cannot use PCDestroy_HPDDMShell() because PCSHELL not set for unassembled levels */
1538     for (n = data->N - 1; n < requested - 1; ++n) {
1539       if (data->levels[n]->P) {
1540         PetscCall(HPDDM::Schwarz<PetscScalar>::destroy(data->levels[n], PETSC_TRUE));
1541         PetscCall(VecDestroyVecs(1, &data->levels[n]->v[0]));
1542         PetscCall(VecDestroyVecs(2, &data->levels[n]->v[1]));
1543         PetscCall(MatDestroy(data->levels[n]->V));
1544         PetscCall(MatDestroy(data->levels[n]->V + 1));
1545         PetscCall(MatDestroy(data->levels[n]->V + 2));
1546         PetscCall(VecDestroy(&data->levels[n]->D));
1547         PetscCall(VecScatterDestroy(&data->levels[n]->scatter));
1548       }
1549     }
1550     if (reused) {
1551       for (n = reused; n < PETSC_PCHPDDM_MAXLEVELS && data->levels[n]; ++n) {
1552         PetscCall(KSPDestroy(&data->levels[n]->ksp));
1553         PetscCall(PCDestroy(&data->levels[n]->pc));
1554       }
1555     }
1556     PetscCheck(!PetscDefined(USE_DEBUG), PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "%" PetscInt_FMT " levels requested, only %" PetscInt_FMT " built + %" PetscInt_FMT " reused. Options for level(s) > %" PetscInt_FMT ", including -%spc_hpddm_coarse_ will not be taken into account. It is best to tune parameters, e.g., a higher value for -%spc_hpddm_levels_%" PetscInt_FMT "_eps_threshold so that at least one local deflation vector will be selected. If you don't want this to error out, compile --with-debugging=0", requested,
1557                data->N, reused, data->N, pcpre ? pcpre : "", pcpre ? pcpre : "", data->N);
1558   }
1559   /* these solvers are created after PCSetFromOptions() is called */
1560   if (pc->setfromoptionscalled) {
1561     for (n = 0; n < data->N; ++n) {
1562       if (data->levels[n]->ksp) PetscCall(KSPSetFromOptions(data->levels[n]->ksp));
1563       if (data->levels[n]->pc) PetscCall(PCSetFromOptions(data->levels[n]->pc));
1564     }
1565     pc->setfromoptionscalled = 0;
1566   }
1567   data->N += reused;
1568   if (data->share && swap) {
1569     /* swap back pointers so that variables follow the user-provided numbering */
1570     std::swap(C, data->aux);
1571     std::swap(uis, data->is);
1572     PetscCall(MatDestroy(&C));
1573     PetscCall(ISDestroy(&uis));
1574   }
1575   PetscFunctionReturn(0);
1576 }
1577 
1578 /*@
1579      PCHPDDMSetCoarseCorrectionType - Sets the coarse correction type.
1580 
1581    Collective
1582 
1583    Input Parameters:
1584 +     pc - preconditioner context
1585 -     type - `PC_HPDDM_COARSE_CORRECTION_DEFLATED`, `PC_HPDDM_COARSE_CORRECTION_ADDITIVE`, or `PC_HPDDM_COARSE_CORRECTION_BALANCED`
1586 
1587    Options Database Key:
1588 .   -pc_hpddm_coarse_correction <deflated, additive, balanced> - type of coarse correction to apply
1589 
1590    Level: intermediate
1591 
1592 .seealso: `PCHPDDMGetCoarseCorrectionType()`, `PCHPDDM`, `PCHPDDMCoarseCorrectionType`
1593 @*/
PetscErrorCode PCHPDDMSetCoarseCorrectionType(PC pc, PCHPDDMCoarseCorrectionType type)
{
  PetscFunctionBegin;
  /* validate the PC handle and check that all ranks of the communicator pass the same enum value */
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(pc, type, 2);
  /* dispatch to the PCHPDDM implementation; PetscTryMethod() is a no-op when pc is not of type PCHPDDM */
  PetscTryMethod(pc, "PCHPDDMSetCoarseCorrectionType_C", (PC, PCHPDDMCoarseCorrectionType), (pc, type));
  PetscFunctionReturn(0);
}
1602 
1603 /*@
1604      PCHPDDMGetCoarseCorrectionType - Gets the coarse correction type.
1605 
1606    Input Parameter:
1607 .     pc - preconditioner context
1608 
1609    Output Parameter:
1610 .     type - `PC_HPDDM_COARSE_CORRECTION_DEFLATED`, `PC_HPDDM_COARSE_CORRECTION_ADDITIVE`, or `PC_HPDDM_COARSE_CORRECTION_BALANCED`
1611 
1612    Level: intermediate
1613 
1614 .seealso: `PCHPDDMSetCoarseCorrectionType()`, `PCHPDDM`, `PCHPDDMCoarseCorrectionType`
1615 @*/
1616 PetscErrorCode PCHPDDMGetCoarseCorrectionType(PC pc, PCHPDDMCoarseCorrectionType *type)
1617 {
1618   PetscFunctionBegin;
1619   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1620   if (type) {
1621     PetscValidPointer(type, 2);
1622     PetscUseMethod(pc, "PCHPDDMGetCoarseCorrectionType_C", (PC, PCHPDDMCoarseCorrectionType *), (pc, type));
1623   }
1624   PetscFunctionReturn(0);
1625 }
1626 
1627 static PetscErrorCode PCHPDDMSetCoarseCorrectionType_HPDDM(PC pc, PCHPDDMCoarseCorrectionType type)
1628 {
1629   PC_HPDDM *data = (PC_HPDDM *)pc->data;
1630 
1631   PetscFunctionBegin;
1632   PetscCheck(type >= 0 && type <= 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PCHPDDMCoarseCorrectionType %d", type);
1633   data->correction = type;
1634   PetscFunctionReturn(0);
1635 }
1636 
1637 static PetscErrorCode PCHPDDMGetCoarseCorrectionType_HPDDM(PC pc, PCHPDDMCoarseCorrectionType *type)
1638 {
1639   PC_HPDDM *data = (PC_HPDDM *)pc->data;
1640 
1641   PetscFunctionBegin;
1642   *type = data->correction;
1643   PetscFunctionReturn(0);
1644 }
1645 
1646 /*@
1647      PCHPDDMSetSTShareSubKSP - Sets whether the `KSP` in SLEPc `ST` and the fine-level subdomain solver should be shared.
1648 
1649    Input Parameters:
1650 +     pc - preconditioner context
1651 -     share - whether the `KSP` should be shared or not
1652 
1653    Note:
1654      This is not the same as `PCSetReusePreconditioner()`. Given certain conditions (visible using -info), a symbolic factorization can be skipped
1655      when using a subdomain `PCType` such as `PCLU` or `PCCHOLESKY`.
1656 
1657    Level: advanced
1658 
1659 .seealso: `PCHPDDM`, `PCHPDDMGetSTShareSubKSP()`
1660 @*/
PetscErrorCode PCHPDDMSetSTShareSubKSP(PC pc, PetscBool share)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* dispatch to the PCHPDDM implementation; PetscTryMethod() is a no-op when pc is not of type PCHPDDM */
  PetscTryMethod(pc, "PCHPDDMSetSTShareSubKSP_C", (PC, PetscBool), (pc, share));
  PetscFunctionReturn(0);
}
1668 
1669 /*@
1670      PCHPDDMGetSTShareSubKSP - Gets whether the `KSP` in SLEPc `ST` and the fine-level subdomain solver is shared.
1671 
1672    Input Parameter:
1673 .     pc - preconditioner context
1674 
1675    Output Parameter:
1676 .     share - whether the `KSP` is shared or not
1677 
1678    Note:
1679      This is not the same as `PCGetReusePreconditioner()`. The return value is unlikely to be true, but when it is, a symbolic factorization can be skipped
1680      when using a subdomain `PCType` such as `PCLU` or `PCCHOLESKY`.
1681 
1682    Level: advanced
1683 
1684 .seealso: `PCHPDDM`, `PCHPDDMSetSTShareSubKSP()`
1685 @*/
1686 PetscErrorCode PCHPDDMGetSTShareSubKSP(PC pc, PetscBool *share)
1687 {
1688   PetscFunctionBegin;
1689   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1690   if (share) {
1691     PetscValidBoolPointer(share, 2);
1692     PetscUseMethod(pc, "PCHPDDMGetSTShareSubKSP_C", (PC, PetscBool *), (pc, share));
1693   }
1694   PetscFunctionReturn(0);
1695 }
1696 
1697 static PetscErrorCode PCHPDDMSetSTShareSubKSP_HPDDM(PC pc, PetscBool share)
1698 {
1699   PC_HPDDM *data = (PC_HPDDM *)pc->data;
1700 
1701   PetscFunctionBegin;
1702   data->share = share;
1703   PetscFunctionReturn(0);
1704 }
1705 
1706 static PetscErrorCode PCHPDDMGetSTShareSubKSP_HPDDM(PC pc, PetscBool *share)
1707 {
1708   PC_HPDDM *data = (PC_HPDDM *)pc->data;
1709 
1710   PetscFunctionBegin;
1711   *share = data->share;
1712   PetscFunctionReturn(0);
1713 }
1714 
1715 /*@
1716      PCHPDDMSetDeflationMat - Sets the deflation space used to assemble a coarser operator.
1717 
1718    Input Parameters:
1719 +     pc - preconditioner context
1720 .     is - index set of the local deflation matrix
1721 -     U - deflation sequential matrix stored as a `MATSEQDENSE`
1722 
1723    Level: advanced
1724 
1725 .seealso: `PCHPDDM`, `PCDeflationSetSpace()`, `PCMGSetRestriction()`
1726 @*/
PetscErrorCode PCHPDDMSetDeflationMat(PC pc, IS is, Mat U)
{
  PetscFunctionBegin;
  /* all three arguments are mandatory, unlike PCHPDDMSetAuxiliaryMat() plumbing which tolerates NULLs internally */
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(is, IS_CLASSID, 2);
  PetscValidHeaderSpecific(U, MAT_CLASSID, 3);
  /* dispatch to the PCHPDDM implementation; PetscTryMethod() is a no-op when pc is not of type PCHPDDM */
  PetscTryMethod(pc, "PCHPDDMSetDeflationMat_C", (PC, IS, Mat), (pc, is, U));
  PetscFunctionReturn(0);
}
1736 
static PetscErrorCode PCHPDDMSetDeflationMat_HPDDM(PC pc, IS is, Mat U)
{
  PetscFunctionBegin;
  /* reuse the auxiliary-Mat plumbing, with the trailing PETSC_TRUE marking U as a user-supplied deflation space */
  PetscCall(PCHPDDMSetAuxiliaryMat_Private(pc, is, U, PETSC_TRUE));
  PetscFunctionReturn(0);
}
1743 
1744 PetscErrorCode HPDDMLoadDL_Private(PetscBool *found)
1745 {
1746   char lib[PETSC_MAX_PATH_LEN], dlib[PETSC_MAX_PATH_LEN], dir[PETSC_MAX_PATH_LEN];
1747 
1748   PetscFunctionBegin;
1749   PetscValidBoolPointer(found, 1);
1750   PetscCall(PetscStrcpy(dir, "${PETSC_LIB_DIR}"));
1751   PetscCall(PetscOptionsGetString(NULL, NULL, "-hpddm_dir", dir, sizeof(dir), NULL));
1752   PetscCall(PetscSNPrintf(lib, sizeof(lib), "%s/libhpddm_petsc", dir));
1753   PetscCall(PetscDLLibraryRetrieve(PETSC_COMM_SELF, lib, dlib, 1024, found));
1754 #if defined(SLEPC_LIB_DIR) /* this variable is passed during SLEPc ./configure since    */
1755   if (!*found) {           /* slepcconf.h is not yet built (and thus can't be included) */
1756     PetscCall(PetscStrcpy(dir, HPDDM_STR(SLEPC_LIB_DIR)));
1757     PetscCall(PetscSNPrintf(lib, sizeof(lib), "%s/libhpddm_petsc", dir));
1758     PetscCall(PetscDLLibraryRetrieve(PETSC_COMM_SELF, lib, dlib, 1024, found));
1759   }
1760 #endif
1761   PetscCheck(*found, PETSC_COMM_SELF, PETSC_ERR_PLIB, "%s not found", lib);
1762   PetscCall(PetscDLLibraryAppend(PETSC_COMM_SELF, &PetscDLLibrariesLoaded, dlib));
1763   PetscFunctionReturn(0);
1764 }
1765 
1766 /*MC
1767      PCHPDDM - Interface with the HPDDM library.
1768 
1769    This `PC` may be used to build multilevel spectral domain decomposition methods based on the GenEO framework [2011, 2019]. It may be viewed as an alternative to spectral
1770    AMGe or `PCBDDC` with adaptive selection of constraints. The interface is explained in details in [2021] (see references below)
1771 
1772    The matrix to be preconditioned (Pmat) may be unassembled (`MATIS`), assembled (`MATAIJ`, `MATBAIJ`, or `MATSBAIJ`), hierarchical (`MATHTOOL`), or `MATNORMAL`.
1773 
1774    For multilevel preconditioning, when using an assembled or hierarchical Pmat, one must provide an auxiliary local `Mat` (unassembled local operator for GenEO) using
1775    `PCHPDDMSetAuxiliaryMat()`. Calling this routine is not needed when using a `MATIS` Pmat, assembly is done internally using `MatConvert()`.
1776 
1777    Options Database Keys:
1778 +   -pc_hpddm_define_subdomains <true, default=false> - on the finest level, calls `PCASMSetLocalSubdomains()` with the `IS` supplied in `PCHPDDMSetAuxiliaryMat()`
1779     (not relevant with an unassembled Pmat)
1780 .   -pc_hpddm_has_neumann <true, default=false> - on the finest level, informs the `PC` that the local Neumann matrix is supplied in `PCHPDDMSetAuxiliaryMat()`
1781 -   -pc_hpddm_coarse_correction <type, default=deflated> - determines the `PCHPDDMCoarseCorrectionType` when calling `PCApply()`
1782 
1783    Options for subdomain solvers, subdomain eigensolvers (for computing deflation vectors), and the coarse solver can be set using the following options database prefixes.
1784 .vb
1785       -pc_hpddm_levels_%d_pc_
1786       -pc_hpddm_levels_%d_ksp_
1787       -pc_hpddm_levels_%d_eps_
1788       -pc_hpddm_levels_%d_p
1789       -pc_hpddm_levels_%d_mat_type
1790       -pc_hpddm_coarse_
1791       -pc_hpddm_coarse_p
1792       -pc_hpddm_coarse_mat_type
1793       -pc_hpddm_coarse_mat_chop
1794 .ve
1795 
1796    E.g., -pc_hpddm_levels_1_sub_pc_type lu -pc_hpddm_levels_1_eps_nev 10 -pc_hpddm_levels_2_p 4 -pc_hpddm_levels_2_sub_pc_type lu -pc_hpddm_levels_2_eps_nev 10
1797     -pc_hpddm_coarse_p 2 -pc_hpddm_coarse_mat_type baij will use 10 deflation vectors per subdomain on the fine "level 1",
1798     aggregate the fine subdomains into 4 "level 2" subdomains, then use 10 deflation vectors per subdomain on "level 2",
1799     and assemble the coarse matrix (of dimension 4 x 10 = 40) on two processes as a `MATBAIJ` (default is `MATSBAIJ`).
1800 
1801    In order to activate a "level N+1" coarse correction, it is mandatory to call -pc_hpddm_levels_N_eps_nev <nu> or -pc_hpddm_levels_N_eps_threshold <val>. The default -pc_hpddm_coarse_p value is 1, meaning that the coarse operator is aggregated on a single process.
1802 
1803    This preconditioner requires that you build PETSc with SLEPc (``--download-slepc``). By default, the underlying concurrent eigenproblems
1804    are solved using SLEPc shift-and-invert spectral transformation. This is usually what gives the best performance for GenEO, cf. [2011, 2013]. As
1805    stated above, SLEPc options are available through -pc_hpddm_levels_%d_, e.g., -pc_hpddm_levels_1_eps_type arpack -pc_hpddm_levels_1_eps_nev 10
1806    -pc_hpddm_levels_1_st_type sinvert. There are furthermore three options related to the (subdomain-wise local) eigensolver that are not described in
1807    SLEPc documentation since they are specific to `PCHPDDM`.
1808 .vb
1809       -pc_hpddm_levels_1_st_share_sub_ksp
1810       -pc_hpddm_levels_%d_eps_threshold
1811       -pc_hpddm_levels_1_eps_use_inertia
1812 .ve
1813 
1814    The first option from the list only applies to the fine-level eigensolver, see `PCHPDDMSetSTShareSubKSP()`. The second option from the list is
1815    used to filter eigenmodes retrieved after convergence of `EPSSolve()` at "level N" such that eigenvectors used to define a "level N+1" coarse
   correction are associated with eigenvalues whose magnitude is lower than or equal to -pc_hpddm_levels_N_eps_threshold. When using an `EPS` which cannot
1817    determine a priori the proper -pc_hpddm_levels_N_eps_nev such that all wanted eigenmodes are retrieved, it is possible to get an estimation of the
1818    correct value using the third option from the list, -pc_hpddm_levels_1_eps_use_inertia, see `MatGetInertia()`. In that case, there is no need
1819    to supply -pc_hpddm_levels_1_eps_nev. This last option also only applies to the fine-level (N = 1) eigensolver.
1820 
1821    References:
1822 +   2011 - A robust two-level domain decomposition preconditioner for systems of PDEs. Spillane, Dolean, Hauret, Nataf, Pechstein, and Scheichl. Comptes Rendus Mathematique.
1823 .   2013 - Scalable domain decomposition preconditioners for heterogeneous elliptic problems. Jolivet, Hecht, Nataf, and Prud'homme. SC13.
1824 .   2015 - An introduction to domain decomposition methods: algorithms, theory, and parallel implementation. Dolean, Jolivet, and Nataf. SIAM.
1825 .   2019 - A multilevel Schwarz preconditioner based on a hierarchy of robust coarse spaces. Al Daas, Grigori, Jolivet, and Tournier. SIAM Journal on Scientific Computing.
.   2021 - KSPHPDDM and PCHPDDM: extending PETSc with advanced Krylov methods and robust multilevel overlapping Schwarz preconditioners. Jolivet, Roman, and Zampini. Computers & Mathematics with Applications.
1827 .   2022a - A robust algebraic domain decomposition preconditioner for sparse normal equations. Al Daas, Jolivet, and Scott. SIAM Journal on Scientific Computing.
1828 -   2022b - A robust algebraic multilevel domain decomposition preconditioner for sparse symmetric positive definite matrices. Al Daas and Jolivet.
1829 
1830    Level: intermediate
1831 
1832 .seealso: `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCHPDDMSetAuxiliaryMat()`, `MATIS`, `PCBDDC`, `PCDEFLATION`, `PCTELESCOPE`, `PCASM`,
1833           `PCHPDDMSetCoarseCorrectionType()`, `PCHPDDMHasNeumannMat()`, `PCHPDDMSetRHSMat()`, `PCHPDDMSetDeflationMat()`, `PCHPDDMSetSTShareSubKSP()`,
1834           `PCHPDDMGetSTShareSubKSP()`, `PCHPDDMGetCoarseCorrectionType()`, `PCHPDDMGetComplexities()`
1835 M*/
1836 PETSC_EXTERN PetscErrorCode PCCreate_HPDDM(PC pc)
1837 {
1838   PC_HPDDM *data;
1839   PetscBool found;
1840 
1841   PetscFunctionBegin;
1842   if (!loadedSym) {
1843     PetscCall(HPDDMLoadDL_Private(&found));
1844     if (found) PetscCall(PetscDLLibrarySym(PETSC_COMM_SELF, &PetscDLLibrariesLoaded, NULL, "PCHPDDM_Internal", (void **)&loadedSym));
1845   }
1846   PetscCheck(loadedSym, PETSC_COMM_SELF, PETSC_ERR_PLIB, "PCHPDDM_Internal symbol not found in loaded libhpddm_petsc");
1847   PetscCall(PetscNew(&data));
1848   pc->data                = data;
1849   data->Neumann           = PETSC_BOOL3_UNKNOWN;
1850   pc->ops->reset          = PCReset_HPDDM;
1851   pc->ops->destroy        = PCDestroy_HPDDM;
1852   pc->ops->setfromoptions = PCSetFromOptions_HPDDM;
1853   pc->ops->setup          = PCSetUp_HPDDM;
1854   pc->ops->apply          = PCApply_HPDDM;
1855   pc->ops->matapply       = PCMatApply_HPDDM;
1856   pc->ops->view           = PCView_HPDDM;
1857   pc->ops->presolve       = PCPreSolve_HPDDM;
1858 
1859   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHPDDMSetAuxiliaryMat_C", PCHPDDMSetAuxiliaryMat_HPDDM));
1860   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHPDDMHasNeumannMat_C", PCHPDDMHasNeumannMat_HPDDM));
1861   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHPDDMSetRHSMat_C", PCHPDDMSetRHSMat_HPDDM));
1862   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHPDDMSetCoarseCorrectionType_C", PCHPDDMSetCoarseCorrectionType_HPDDM));
1863   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHPDDMGetCoarseCorrectionType_C", PCHPDDMGetCoarseCorrectionType_HPDDM));
1864   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHPDDMSetSTShareSubKSP_C", PCHPDDMSetSTShareSubKSP_HPDDM));
1865   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHPDDMGetSTShareSubKSP_C", PCHPDDMGetSTShareSubKSP_HPDDM));
1866   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHPDDMSetDeflationMat_C", PCHPDDMSetDeflationMat_HPDDM));
1867   PetscFunctionReturn(0);
1868 }
1869 
1870 /*@C
1871      PCHPDDMInitializePackage - This function initializes everything in the `PCHPDDM` package. It is called from `PCInitializePackage()`.
1872 
1873    Level: developer
1874 
1875 .seealso: `PetscInitialize()`
1876 @*/
1877 PetscErrorCode PCHPDDMInitializePackage(void)
1878 {
1879   char     ename[32];
1880   PetscInt i;
1881 
1882   PetscFunctionBegin;
1883   if (PCHPDDMPackageInitialized) PetscFunctionReturn(0);
1884   PCHPDDMPackageInitialized = PETSC_TRUE;
1885   PetscCall(PetscRegisterFinalize(PCHPDDMFinalizePackage));
1886   /* general events registered once during package initialization */
1887   /* some of these events are not triggered in libpetsc,          */
1888   /* but rather directly in libhpddm_petsc,                       */
1889   /* which is in charge of performing the following operations    */
1890 
1891   /* domain decomposition structure from Pmat sparsity pattern    */
1892   PetscCall(PetscLogEventRegister("PCHPDDMStrc", PC_CLASSID, &PC_HPDDM_Strc));
1893   /* Galerkin product, redistribution, and setup (not triggered in libpetsc)                */
1894   PetscCall(PetscLogEventRegister("PCHPDDMPtAP", PC_CLASSID, &PC_HPDDM_PtAP));
1895   /* Galerkin product with summation, redistribution, and setup (not triggered in libpetsc) */
1896   PetscCall(PetscLogEventRegister("PCHPDDMPtBP", PC_CLASSID, &PC_HPDDM_PtBP));
1897   /* next level construction using PtAP and PtBP (not triggered in libpetsc)                */
1898   PetscCall(PetscLogEventRegister("PCHPDDMNext", PC_CLASSID, &PC_HPDDM_Next));
1899   static_assert(PETSC_PCHPDDM_MAXLEVELS <= 9, "PETSC_PCHPDDM_MAXLEVELS value is too high");
1900   for (i = 1; i < PETSC_PCHPDDM_MAXLEVELS; ++i) {
1901     PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCHPDDMSetUp L%1" PetscInt_FMT, i));
1902     /* events during a PCSetUp() at level #i _except_ the assembly */
1903     /* of the Galerkin operator of the coarser level #(i + 1)      */
1904     PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_HPDDM_SetUp[i - 1]));
1905     PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCHPDDMSolve L%1" PetscInt_FMT, i));
1906     /* events during a PCApply() at level #i _except_              */
1907     /* the KSPSolve() of the coarser level #(i + 1)                */
1908     PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_HPDDM_Solve[i - 1]));
1909   }
1910   PetscFunctionReturn(0);
1911 }
1912 
/*@C
     PCHPDDMFinalizePackage - This function frees everything from the `PCHPDDM` package. It is called from `PetscFinalize()`.

   Level: developer

.seealso: `PetscFinalize()`
@*/
PetscErrorCode PCHPDDMFinalizePackage(void)
{
  PetscFunctionBegin;
  /* reset the guard so PCHPDDMInitializePackage() re-registers its log events */
  /* if PETSc is initialized again after PetscFinalize()                       */
  PCHPDDMPackageInitialized = PETSC_FALSE;
  PetscFunctionReturn(0);
}
1926