xref: /petsc/src/ksp/pc/impls/hpddm/pchpddm.cxx (revision a68bbae58a07f2fb515cab24a67de1159d72e8a2)
1 #include <petsc/private/dmimpl.h>
2 #include <petsc/private/matimpl.h>
3 #include <petsc/private/petschpddm.h> /*I "petscpc.h" I*/
4 #include <petsc/private/pcimpl.h>     /* this must be included after petschpddm.h so that _PCIMPL_H is not defined            */
5                                       /* otherwise, it is assumed that one is compiling libhpddm_petsc => circular dependency */
6 #if PetscDefined(HAVE_FORTRAN)
7   #include <petsc/private/fortranimpl.h>
8 #endif
9 
10 static PetscErrorCode (*loadedSym)(HPDDM::Schwarz<PetscScalar> *const, IS, Mat, Mat, Mat, std::vector<Vec>, PC_HPDDM_Level **const) = nullptr;
11 
12 static PetscBool PCHPDDMPackageInitialized = PETSC_FALSE;
13 
14 PetscLogEvent PC_HPDDM_Strc;
15 PetscLogEvent PC_HPDDM_PtAP;
16 PetscLogEvent PC_HPDDM_PtBP;
17 PetscLogEvent PC_HPDDM_Next;
18 PetscLogEvent PC_HPDDM_SetUp[PETSC_PCHPDDM_MAXLEVELS];
19 PetscLogEvent PC_HPDDM_Solve[PETSC_PCHPDDM_MAXLEVELS];
20 
21 const char *const PCHPDDMCoarseCorrectionTypes[] = {"DEFLATED", "ADDITIVE", "BALANCED", "PCHPDDMCoarseCorrectionType", "PC_HPDDM_COARSE_CORRECTION_", nullptr};
22 
23 static PetscErrorCode PCReset_HPDDM(PC pc)
24 {
25   PC_HPDDM *data = (PC_HPDDM *)pc->data;
26   PetscInt  i;
27 
28   PetscFunctionBegin;
29   if (data->levels) {
30     for (i = 0; i < PETSC_PCHPDDM_MAXLEVELS && data->levels[i]; ++i) {
31       PetscCall(KSPDestroy(&data->levels[i]->ksp));
32       PetscCall(PCDestroy(&data->levels[i]->pc));
33       PetscCall(PetscFree(data->levels[i]));
34     }
35     PetscCall(PetscFree(data->levels));
36   }
37 
38   PetscCall(ISDestroy(&data->is));
39   PetscCall(MatDestroy(&data->aux));
40   PetscCall(MatDestroy(&data->B));
41   PetscCall(VecDestroy(&data->normal));
42   data->correction = PC_HPDDM_COARSE_CORRECTION_DEFLATED;
43   data->Neumann    = PETSC_BOOL3_UNKNOWN;
44   data->deflation  = PETSC_FALSE;
45   data->setup      = nullptr;
46   data->setup_ctx  = nullptr;
47   PetscFunctionReturn(PETSC_SUCCESS);
48 }
49 
50 static PetscErrorCode PCDestroy_HPDDM(PC pc)
51 {
52   PC_HPDDM *data = (PC_HPDDM *)pc->data;
53 
54   PetscFunctionBegin;
55   PetscCall(PCReset_HPDDM(pc));
56   PetscCall(PetscFree(data));
57   PetscCall(PetscObjectChangeTypeName((PetscObject)pc, nullptr));
58   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHPDDMSetAuxiliaryMat_C", nullptr));
59   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHPDDMHasNeumannMat_C", nullptr));
60   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHPDDMSetRHSMat_C", nullptr));
61   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHPDDMSetCoarseCorrectionType_C", nullptr));
62   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHPDDMGetCoarseCorrectionType_C", nullptr));
63   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHPDDMSetSTShareSubKSP_C", nullptr));
64   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHPDDMGetSTShareSubKSP_C", nullptr));
65   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHPDDMSetDeflationMat_C", nullptr));
66   PetscFunctionReturn(PETSC_SUCCESS);
67 }
68 
69 static inline PetscErrorCode PCHPDDMSetAuxiliaryMat_Private(PC pc, IS is, Mat A, PetscBool deflation)
70 {
71   PC_HPDDM *data = (PC_HPDDM *)pc->data;
72 
73   PetscFunctionBegin;
74   if (is) {
75     PetscCall(PetscObjectReference((PetscObject)is));
76     if (data->is) { /* new overlap definition resets the PC */
77       PetscCall(PCReset_HPDDM(pc));
78       pc->setfromoptionscalled = 0;
79       pc->setupcalled          = 0;
80     }
81     PetscCall(ISDestroy(&data->is));
82     data->is = is;
83   }
84   if (A) {
85     PetscCall(PetscObjectReference((PetscObject)A));
86     PetscCall(MatDestroy(&data->aux));
87     data->aux = A;
88   }
89   data->deflation = deflation;
90   PetscFunctionReturn(PETSC_SUCCESS);
91 }
92 
93 static inline PetscErrorCode PCHPDDMSetAuxiliaryMatNormal_Private(PC pc, Mat A, Mat N, Mat *B, const char *pcpre)
94 {
95   PC_HPDDM *data = (PC_HPDDM *)pc->data;
96   Mat      *splitting, *sub, aux;
97   IS        is, cols[2], rows;
98   PetscReal norm;
99   PetscBool flg;
100   char      type[256] = {}; /* same size as in src/ksp/pc/interface/pcset.c */
101 
102   PetscFunctionBegin;
103   PetscCall(MatConvert(N, MATAIJ, MAT_INITIAL_MATRIX, B));
104   PetscCall(ISCreateStride(PETSC_COMM_SELF, A->cmap->n, A->cmap->rstart, 1, cols));
105   PetscCall(ISCreateStride(PETSC_COMM_SELF, A->rmap->N, 0, 1, &rows));
106   PetscCall(MatSetOption(A, MAT_SUBMAT_SINGLEIS, PETSC_TRUE));
107   PetscCall(MatIncreaseOverlap(*B, 1, cols, 1));
108   PetscCall(MatCreateSubMatrices(A, 1, &rows, cols, MAT_INITIAL_MATRIX, &splitting));
109   PetscCall(ISCreateStride(PETSC_COMM_SELF, A->cmap->n, A->cmap->rstart, 1, &is));
110   PetscCall(ISEmbed(*cols, is, PETSC_TRUE, cols + 1));
111   PetscCall(ISDestroy(&is));
112   PetscCall(MatCreateSubMatrices(*splitting, 1, &rows, cols + 1, MAT_INITIAL_MATRIX, &sub));
113   PetscCall(ISDestroy(cols + 1));
114   PetscCall(MatFindZeroRows(*sub, &is));
115   PetscCall(MatDestroySubMatrices(1, &sub));
116   PetscCall(ISDestroy(&rows));
117   PetscCall(MatSetOption(*splitting, MAT_KEEP_NONZERO_PATTERN, PETSC_TRUE));
118   PetscCall(MatZeroRowsIS(*splitting, is, 0.0, nullptr, nullptr));
119   PetscCall(ISDestroy(&is));
120   PetscCall(PetscOptionsGetString(nullptr, pcpre, "-pc_hpddm_levels_1_sub_pc_type", type, sizeof(type), nullptr));
121   PetscCall(PetscStrcmp(type, PCQR, &flg));
122   if (!flg) {
123     Mat conjugate = *splitting;
124     if (PetscDefined(USE_COMPLEX)) {
125       PetscCall(MatDuplicate(*splitting, MAT_COPY_VALUES, &conjugate));
126       PetscCall(MatConjugate(conjugate));
127     }
128     PetscCall(MatTransposeMatMult(conjugate, *splitting, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &aux));
129     if (PetscDefined(USE_COMPLEX)) PetscCall(MatDestroy(&conjugate));
130     PetscCall(MatNorm(aux, NORM_FROBENIUS, &norm));
131     PetscCall(MatSetOption(aux, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE));
132     PetscCall(MatShift(aux, PETSC_SMALL * norm));
133   } else {
134     PetscBool flg;
135     PetscCall(PetscObjectTypeCompare((PetscObject)N, MATNORMAL, &flg));
136     if (flg) PetscCall(MatCreateNormal(*splitting, &aux));
137     else PetscCall(MatCreateNormalHermitian(*splitting, &aux));
138   }
139   PetscCall(MatDestroySubMatrices(1, &splitting));
140   PetscCall(PCHPDDMSetAuxiliaryMat(pc, *cols, aux, nullptr, nullptr));
141   data->Neumann = PETSC_BOOL3_TRUE;
142   PetscCall(ISDestroy(cols));
143   PetscCall(MatDestroy(&aux));
144   PetscFunctionReturn(PETSC_SUCCESS);
145 }
146 
147 static PetscErrorCode PCHPDDMSetAuxiliaryMat_HPDDM(PC pc, IS is, Mat A, PetscErrorCode (*setup)(Mat, PetscReal, Vec, Vec, PetscReal, IS, void *), void *setup_ctx)
148 {
149   PC_HPDDM *data = (PC_HPDDM *)pc->data;
150 
151   PetscFunctionBegin;
152   PetscCall(PCHPDDMSetAuxiliaryMat_Private(pc, is, A, PETSC_FALSE));
153   if (setup) {
154     data->setup     = setup;
155     data->setup_ctx = setup_ctx;
156   }
157   PetscFunctionReturn(PETSC_SUCCESS);
158 }
159 
160 /*@
161      PCHPDDMSetAuxiliaryMat - Sets the auxiliary matrix used by `PCHPDDM` for the concurrent GenEO problems at the finest level. As an example, in a finite element context with nonoverlapping subdomains plus (overlapping) ghost elements, this could be the unassembled (Neumann) local overlapping operator, as opposed to the assembled (Dirichlet) local overlapping operator obtained by summing neighborhood contributions at the interface of ghost elements.
162 
163    Input Parameters:
164 +     pc - preconditioner context
165 .     is - index set of the local auxiliary (e.g., Neumann) matrix
166 .     A - auxiliary sequential matrix
167 .     setup - function for generating the auxiliary matrix
168 -     setup_ctx - context for setup
169 
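   Example Usage:
   A minimal sketch; `is` and `aux` are placeholders for an index set covering the overlapping subdomain and the sequential auxiliary matrix assembled on it, and the setup callback and its context are omitted:
.vb
   PetscCall(PCSetType(pc, PCHPDDM));
   PetscCall(PCHPDDMSetAuxiliaryMat(pc, is, aux, NULL, NULL)); // no setup callback, no context
.ve
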
170    Level: intermediate
171 
172 .seealso: `PCHPDDM`, `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCHPDDMSetRHSMat()`, `MATIS`
173 @*/
174 PetscErrorCode PCHPDDMSetAuxiliaryMat(PC pc, IS is, Mat A, PetscErrorCode (*setup)(Mat, PetscReal, Vec, Vec, PetscReal, IS, void *), void *setup_ctx)
175 {
176   PetscFunctionBegin;
177   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
178   if (is) PetscValidHeaderSpecific(is, IS_CLASSID, 2);
179   if (A) PetscValidHeaderSpecific(A, MAT_CLASSID, 3);
180 #if PetscDefined(HAVE_FORTRAN)
181   if (reinterpret_cast<void *>(setup) == reinterpret_cast<void *>(PETSC_NULL_FUNCTION_Fortran)) setup = nullptr;
182   if (setup_ctx == PETSC_NULL_INTEGER_Fortran) setup_ctx = nullptr;
183 #endif
184   PetscTryMethod(pc, "PCHPDDMSetAuxiliaryMat_C", (PC, IS, Mat, PetscErrorCode(*)(Mat, PetscReal, Vec, Vec, PetscReal, IS, void *), void *), (pc, is, A, setup, setup_ctx));
185   PetscFunctionReturn(PETSC_SUCCESS);
186 }
187 
188 static PetscErrorCode PCHPDDMHasNeumannMat_HPDDM(PC pc, PetscBool has)
189 {
190   PC_HPDDM *data = (PC_HPDDM *)pc->data;
191 
192   PetscFunctionBegin;
193   data->Neumann = PetscBoolToBool3(has);
194   PetscFunctionReturn(PETSC_SUCCESS);
195 }
196 
197 /*@
198      PCHPDDMHasNeumannMat - Informs `PCHPDDM` that the `Mat` passed to `PCHPDDMSetAuxiliaryMat()` is the local Neumann matrix.
199 
200    Input Parameters:
201 +     pc - preconditioner context
202 -     has - Boolean value
203 
204    Level: intermediate
205 
206    Notes:
207    This may be used to bypass a call to `MatCreateSubMatrices()` and to `MatConvert()` for `MATSBAIJ` matrices.
208 
209    If a `DMCreateNeumannOverlap()` implementation is available in the `DM` attached to the Pmat, or the Amat, or the `PC`, the flag is internally set to `PETSC_TRUE`. Its default value is otherwise `PETSC_FALSE`.
210 
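   Example Usage:
   A minimal sketch, assuming `aux` passed below is already the local Neumann matrix (the names are placeholders):
.vb
   PetscCall(PCHPDDMSetAuxiliaryMat(pc, is, aux, NULL, NULL));
   PetscCall(PCHPDDMHasNeumannMat(pc, PETSC_TRUE)); // may bypass the MatCreateSubMatrices()/MatConvert() calls mentioned above
.ve
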
211 .seealso: `PCHPDDM`, `PCHPDDMSetAuxiliaryMat()`
212 @*/
213 PetscErrorCode PCHPDDMHasNeumannMat(PC pc, PetscBool has)
214 {
215   PetscFunctionBegin;
216   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
217   PetscTryMethod(pc, "PCHPDDMHasNeumannMat_C", (PC, PetscBool), (pc, has));
218   PetscFunctionReturn(PETSC_SUCCESS);
219 }
220 
221 static PetscErrorCode PCHPDDMSetRHSMat_HPDDM(PC pc, Mat B)
222 {
223   PC_HPDDM *data = (PC_HPDDM *)pc->data;
224 
225   PetscFunctionBegin;
226   PetscCall(PetscObjectReference((PetscObject)B));
227   PetscCall(MatDestroy(&data->B));
228   data->B = B;
229   PetscFunctionReturn(PETSC_SUCCESS);
230 }
231 
232 /*@
233      PCHPDDMSetRHSMat - Sets the right-hand side matrix used by `PCHPDDM` for the concurrent GenEO problems at the finest level. Must be used in conjunction with `PCHPDDMSetAuxiliaryMat`(N), so that Nv = lambda Bv is solved using `EPSSetOperators`(N, B). It is assumed that N and B are provided using the same numbering. This provides a means to try more advanced methods such as GenEO-II or H-GenEO.
234 
235    Input Parameters:
236 +     pc - preconditioner context
237 -     B - right-hand side sequential matrix
238 
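   Example Usage:
   A minimal sketch, assuming `N` and `B` are sequential matrices provided using the same local numbering (both names are placeholders):
.vb
   PetscCall(PCHPDDMSetAuxiliaryMat(pc, is, N, NULL, NULL)); // left-hand side of Nv = lambda Bv
   PetscCall(PCHPDDMSetRHSMat(pc, B));                       // right-hand side of Nv = lambda Bv
.ve
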
239    Level: advanced
240 
241 .seealso: `PCHPDDMSetAuxiliaryMat()`, `PCHPDDM`
242 @*/
243 PetscErrorCode PCHPDDMSetRHSMat(PC pc, Mat B)
244 {
245   PetscFunctionBegin;
246   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
247   if (B) {
248     PetscValidHeaderSpecific(B, MAT_CLASSID, 2);
249     PetscTryMethod(pc, "PCHPDDMSetRHSMat_C", (PC, Mat), (pc, B));
250   }
251   PetscFunctionReturn(PETSC_SUCCESS);
252 }
253 
254 static PetscErrorCode PCSetFromOptions_HPDDM(PC pc, PetscOptionItems *PetscOptionsObject)
255 {
256   PC_HPDDM                   *data   = (PC_HPDDM *)pc->data;
257   PC_HPDDM_Level            **levels = data->levels;
258   char                        prefix[256];
259   int                         i = 1;
260   PetscMPIInt                 size, previous;
261   PetscInt                    n;
262   PCHPDDMCoarseCorrectionType type;
263   PetscBool                   flg = PETSC_TRUE, set;
264 
265   PetscFunctionBegin;
266   if (!data->levels) {
267     PetscCall(PetscCalloc1(PETSC_PCHPDDM_MAXLEVELS, &levels));
268     data->levels = levels;
269   }
270   PetscOptionsHeadBegin(PetscOptionsObject, "PCHPDDM options");
271   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
272   previous = size;
273   while (i < PETSC_PCHPDDM_MAXLEVELS) {
274     PetscInt p = 1;
275 
276     if (!data->levels[i - 1]) PetscCall(PetscNew(data->levels + i - 1));
277     data->levels[i - 1]->parent = data;
278     /* if the previous level has a single process, it is not possible to coarsen further */
279     if (previous == 1 || !flg) break;
280     data->levels[i - 1]->nu        = 0;
281     data->levels[i - 1]->threshold = -1.0;
282     PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "-pc_hpddm_levels_%d_eps_nev", i));
283     PetscCall(PetscOptionsBoundedInt(prefix, "Local number of deflation vectors computed by SLEPc", "EPSSetDimensions", data->levels[i - 1]->nu, &data->levels[i - 1]->nu, nullptr, 0));
284     PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "-pc_hpddm_levels_%d_eps_threshold", i));
285     PetscCall(PetscOptionsReal(prefix, "Local threshold for selecting deflation vectors returned by SLEPc", "PCHPDDM", data->levels[i - 1]->threshold, &data->levels[i - 1]->threshold, nullptr));
286     if (i == 1) {
287       PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "-pc_hpddm_levels_1_st_share_sub_ksp"));
288       PetscCall(PetscOptionsBool(prefix, "Shared KSP between SLEPc ST and the fine-level subdomain solver", "PCHPDDMSetSTShareSubKSP", PETSC_FALSE, &data->share, nullptr));
289     }
290     /* if there is no prescribed coarsening, just break out of the loop */
291     if (data->levels[i - 1]->threshold <= 0.0 && data->levels[i - 1]->nu <= 0 && !(data->deflation && i == 1)) break;
292     else {
293       ++i;
294       PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "-pc_hpddm_levels_%d_eps_nev", i));
295       PetscCall(PetscOptionsHasName(PetscOptionsObject->options, PetscOptionsObject->prefix, prefix, &flg));
296       if (!flg) {
297         PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "-pc_hpddm_levels_%d_eps_threshold", i));
298         PetscCall(PetscOptionsHasName(PetscOptionsObject->options, PetscOptionsObject->prefix, prefix, &flg));
299       }
300       if (flg) {
301         /* if there are coarsening options for the next level, then register it  */
302         /* otherwise, don't, to avoid having both options levels_N_p and coarse_p */
303         PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "-pc_hpddm_levels_%d_p", i));
304         PetscCall(PetscOptionsRangeInt(prefix, "Number of processes used to assemble the coarse operator at this level", "PCHPDDM", p, &p, &flg, 1, PetscMax(1, previous / 2)));
305         previous = p;
306       }
307     }
308   }
309   data->N = i;
310   n       = 1;
311   if (i > 1) {
312     PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "-pc_hpddm_coarse_p"));
313     PetscCall(PetscOptionsRangeInt(prefix, "Number of processes used to assemble the coarsest operator", "PCHPDDM", n, &n, nullptr, 1, PetscMax(1, previous / 2)));
314 #if PetscDefined(HAVE_MUMPS)
315     PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "pc_hpddm_coarse_"));
316     PetscCall(PetscOptionsHasName(nullptr, prefix, "-mat_mumps_use_omp_threads", &flg));
317     if (flg) {
318       char type[64];                                                                                                    /* same size as in src/ksp/pc/impls/factor/factimpl.c */
319       PetscCall(PetscStrncpy(type, n > 1 && PetscDefined(HAVE_MUMPS) ? MATSOLVERMUMPS : MATSOLVERPETSC, sizeof(type))); /* default solver for a MatMPIAIJ or a MatSeqAIJ */
320       PetscCall(PetscOptionsGetString(nullptr, prefix, "-pc_factor_mat_solver_type", type, sizeof(type), nullptr));
321       PetscCall(PetscStrcmp(type, MATSOLVERMUMPS, &flg));
322       PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "-%smat_mumps_use_omp_threads and -%spc_factor_mat_solver_type != %s", prefix, prefix, MATSOLVERMUMPS);
323       size = n;
324       n    = -1;
325       PetscCall(PetscOptionsGetInt(nullptr, prefix, "-mat_mumps_use_omp_threads", &n, nullptr));
326       PetscCheck(n >= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Need to specify a positive integer for -%smat_mumps_use_omp_threads", prefix);
327       PetscCheck(n * size <= previous, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "%d MPI process%s x %d OpenMP thread%s greater than %d available MPI process%s for the coarsest operator", (int)size, size > 1 ? "es" : "", (int)n, n > 1 ? "s" : "", (int)previous, previous > 1 ? "es" : "");
328     }
329 #endif
330     PetscCall(PetscOptionsEnum("-pc_hpddm_coarse_correction", "Type of coarse correction applied each iteration", "PCHPDDMSetCoarseCorrectionType", PCHPDDMCoarseCorrectionTypes, (PetscEnum)data->correction, (PetscEnum *)&type, &flg));
331     if (flg) PetscCall(PCHPDDMSetCoarseCorrectionType(pc, type));
332     PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "-pc_hpddm_has_neumann"));
333     PetscCall(PetscOptionsBool(prefix, "Is the auxiliary Mat the local Neumann matrix?", "PCHPDDMHasNeumannMat", PetscBool3ToBool(data->Neumann), &flg, &set));
334     if (set) data->Neumann = PetscBoolToBool3(flg);
335     data->log_separate = PETSC_FALSE;
336     if (PetscDefined(USE_LOG)) {
337       PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "-pc_hpddm_log_separate"));
338       PetscCall(PetscOptionsBool(prefix, "Log events level by level instead of inside PCSetUp()/KSPSolve()", nullptr, data->log_separate, &data->log_separate, nullptr));
339     }
340   }
341   PetscOptionsHeadEnd();
342   while (i < PETSC_PCHPDDM_MAXLEVELS && data->levels[i]) PetscCall(PetscFree(data->levels[i++]));
343   PetscFunctionReturn(PETSC_SUCCESS);
344 }
345 
346 static PetscErrorCode PCApply_HPDDM(PC pc, Vec x, Vec y)
347 {
348   PC_HPDDM *data = (PC_HPDDM *)pc->data;
349 
350   PetscFunctionBegin;
351   PetscCall(PetscCitationsRegister(HPDDMCitation, &HPDDMCite));
352   PetscCheck(data->levels[0]->ksp, PETSC_COMM_SELF, PETSC_ERR_PLIB, "No KSP attached to PCHPDDM");
353   if (data->log_separate) PetscCall(PetscLogEventBegin(PC_HPDDM_Solve[0], data->levels[0]->ksp, nullptr, nullptr, nullptr)); /* coarser-level events are directly triggered in HPDDM */
354   PetscCall(KSPSolve(data->levels[0]->ksp, x, y));
355   if (data->log_separate) PetscCall(PetscLogEventEnd(PC_HPDDM_Solve[0], data->levels[0]->ksp, nullptr, nullptr, nullptr));
356   PetscFunctionReturn(PETSC_SUCCESS);
357 }
358 
359 static PetscErrorCode PCMatApply_HPDDM(PC pc, Mat X, Mat Y)
360 {
361   PC_HPDDM *data = (PC_HPDDM *)pc->data;
362 
363   PetscFunctionBegin;
364   PetscCall(PetscCitationsRegister(HPDDMCitation, &HPDDMCite));
365   PetscCheck(data->levels[0]->ksp, PETSC_COMM_SELF, PETSC_ERR_PLIB, "No KSP attached to PCHPDDM");
366   PetscCall(KSPMatSolve(data->levels[0]->ksp, X, Y));
367   PetscFunctionReturn(PETSC_SUCCESS);
368 }
369 
370 /*@C
371      PCHPDDMGetComplexities - Computes the grid and operator complexities.
372 
373    Input Parameter:
374 .     pc - preconditioner context
375 
376    Output Parameters:
377 +     gc - grid complexity = sum_i(m_i) / m_1
378 -     oc - operator complexity = sum_i(nnz_i) / nnz_1
379 
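   Example:
   For a hypothetical two-level hierarchy where the fine operator has 10000 rows and 70000 nonzeros and the coarse operator has 500 rows and 5000 nonzeros, gc = (10000 + 500) / 10000 = 1.05 and oc = (70000 + 5000) / 70000 ~ 1.07.
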
380    Level: advanced
381 
382 .seealso: `PCMGGetGridComplexity()`, `PCHPDDM`, `PCHYPRE`, `PCGAMG`
383 @*/
384 static PetscErrorCode PCHPDDMGetComplexities(PC pc, PetscReal *gc, PetscReal *oc)
385 {
386   PC_HPDDM      *data = (PC_HPDDM *)pc->data;
387   MatInfo        info;
388   PetscInt       n, m;
389   PetscLogDouble accumulate[2]{}, nnz1 = 1.0, m1 = 1.0;
390 
391   PetscFunctionBegin;
392   for (n = 0, *gc = 0, *oc = 0; n < data->N; ++n) {
393     if (data->levels[n]->ksp) {
394       Mat P, A;
395       PetscCall(KSPGetOperators(data->levels[n]->ksp, nullptr, &P));
396       PetscCall(MatGetSize(P, &m, nullptr));
397       accumulate[0] += m;
398       if (n == 0) {
399         PetscBool flg;
400         PetscCall(PetscObjectTypeCompareAny((PetscObject)P, &flg, MATNORMAL, MATNORMALHERMITIAN, ""));
401         if (flg) {
402           PetscCall(MatConvert(P, MATAIJ, MAT_INITIAL_MATRIX, &A));
403           P = A;
404         } else PetscCall(PetscObjectReference((PetscObject)P));
405       }
406       if (P->ops->getinfo) {
407         PetscCall(MatGetInfo(P, MAT_GLOBAL_SUM, &info));
408         accumulate[1] += info.nz_used;
409       }
410       if (n == 0) {
411         m1 = m;
412         if (P->ops->getinfo) nnz1 = info.nz_used;
413         PetscCall(MatDestroy(&P));
414       }
415     }
416   }
417   *gc = static_cast<PetscReal>(accumulate[0] / m1);
418   *oc = static_cast<PetscReal>(accumulate[1] / nnz1);
419   PetscFunctionReturn(PETSC_SUCCESS);
420 }
421 
422 static PetscErrorCode PCView_HPDDM(PC pc, PetscViewer viewer)
423 {
424   PC_HPDDM    *data = (PC_HPDDM *)pc->data;
425   PetscViewer  subviewer;
426   PetscSubcomm subcomm;
427   PetscReal    oc, gc;
428   PetscInt     i, tabs;
429   PetscMPIInt  size, color, rank;
430   PetscBool    ascii;
431 
432   PetscFunctionBegin;
433   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &ascii));
434   if (ascii) {
435     PetscCall(PetscViewerASCIIPrintf(viewer, "level%s: %" PetscInt_FMT "\n", data->N > 1 ? "s" : "", data->N));
436     PetscCall(PCHPDDMGetComplexities(pc, &gc, &oc));
437     if (data->N > 1) {
438       if (!data->deflation) {
439         PetscCall(PetscViewerASCIIPrintf(viewer, "Neumann matrix attached? %s\n", PetscBools[PetscBool3ToBool(data->Neumann)]));
440         PetscCall(PetscViewerASCIIPrintf(viewer, "shared subdomain KSP between SLEPc and PETSc? %s\n", PetscBools[data->share]));
441       } else PetscCall(PetscViewerASCIIPrintf(viewer, "user-supplied deflation matrix\n"));
442       PetscCall(PetscViewerASCIIPrintf(viewer, "coarse correction: %s\n", PCHPDDMCoarseCorrectionTypes[data->correction]));
443       PetscCall(PetscViewerASCIIPrintf(viewer, "on process #0, value%s (+ threshold%s if available) for selecting deflation vectors:", data->N > 2 ? "s" : "", data->N > 2 ? "s" : ""));
444       PetscCall(PetscViewerASCIIGetTab(viewer, &tabs));
445       PetscCall(PetscViewerASCIISetTab(viewer, 0));
446       for (i = 1; i < data->N; ++i) {
447         PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, data->levels[i - 1]->nu));
448         if (data->levels[i - 1]->threshold > -0.1) PetscCall(PetscViewerASCIIPrintf(viewer, " (%g)", (double)data->levels[i - 1]->threshold));
449       }
450       PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
451       PetscCall(PetscViewerASCIISetTab(viewer, tabs));
452     }
453     PetscCall(PetscViewerASCIIPrintf(viewer, "grid and operator complexities: %g %g\n", (double)gc, (double)oc));
454     if (data->levels[0]->ksp) {
455       PetscCall(KSPView(data->levels[0]->ksp, viewer));
456       if (data->levels[0]->pc) PetscCall(PCView(data->levels[0]->pc, viewer));
457       for (i = 1; i < data->N; ++i) {
458         if (data->levels[i]->ksp) color = 1;
459         else color = 0;
460         PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
461         PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
462         PetscCall(PetscSubcommCreate(PetscObjectComm((PetscObject)pc), &subcomm));
463         PetscCall(PetscSubcommSetNumber(subcomm, PetscMin(size, 2)));
464         PetscCall(PetscSubcommSetTypeGeneral(subcomm, color, rank));
465         PetscCall(PetscViewerASCIIPushTab(viewer));
466         PetscCall(PetscViewerGetSubViewer(viewer, PetscSubcommChild(subcomm), &subviewer));
467         if (color == 1) {
468           PetscCall(KSPView(data->levels[i]->ksp, subviewer));
469           if (data->levels[i]->pc) PetscCall(PCView(data->levels[i]->pc, subviewer));
470           PetscCall(PetscViewerFlush(subviewer));
471         }
472         PetscCall(PetscViewerRestoreSubViewer(viewer, PetscSubcommChild(subcomm), &subviewer));
473         PetscCall(PetscViewerASCIIPopTab(viewer));
474         PetscCall(PetscSubcommDestroy(&subcomm));
475         PetscCall(PetscViewerFlush(viewer));
476       }
477     }
478   }
479   PetscFunctionReturn(PETSC_SUCCESS);
480 }
481 
482 static PetscErrorCode PCPreSolve_HPDDM(PC pc, KSP ksp, Vec, Vec)
483 {
484   PC_HPDDM *data = (PC_HPDDM *)pc->data;
485   PetscBool flg;
486   Mat       A;
487 
488   PetscFunctionBegin;
489   if (ksp) {
490     PetscCall(PetscObjectTypeCompare((PetscObject)ksp, KSPLSQR, &flg));
491     if (flg && !data->normal) {
492       PetscCall(KSPGetOperators(ksp, &A, nullptr));
493       PetscCall(MatCreateVecs(A, nullptr, &data->normal)); /* temporary Vec used in PCApply_HPDDMShell() for coarse grid corrections */
494     }
495   }
496   PetscFunctionReturn(PETSC_SUCCESS);
497 }
498 
499 static PetscErrorCode PCSetUp_HPDDMShell(PC pc)
500 {
501   PC_HPDDM_Level *ctx;
502   Mat             A, P;
503   Vec             x;
504   const char     *pcpre;
505 
506   PetscFunctionBegin;
507   PetscCall(PCShellGetContext(pc, &ctx));
508   PetscCall(KSPGetOptionsPrefix(ctx->ksp, &pcpre));
509   PetscCall(KSPGetOperators(ctx->ksp, &A, &P));
510   /* smoother */
511   PetscCall(PCSetOptionsPrefix(ctx->pc, pcpre));
512   PetscCall(PCSetOperators(ctx->pc, A, P));
513   if (!ctx->v[0]) {
514     PetscCall(VecDuplicateVecs(ctx->D, 1, &ctx->v[0]));
515     if (!std::is_same<PetscScalar, PetscReal>::value) PetscCall(VecDestroy(&ctx->D));
516     PetscCall(MatCreateVecs(A, &x, nullptr));
517     PetscCall(VecDuplicateVecs(x, 2, &ctx->v[1]));
518     PetscCall(VecDestroy(&x));
519   }
520   std::fill_n(ctx->V, 3, nullptr);
521   PetscFunctionReturn(PETSC_SUCCESS);
522 }
523 
524 template <class Type, typename std::enable_if<std::is_same<Type, Vec>::value>::type * = nullptr>
525 static inline PetscErrorCode PCHPDDMDeflate_Private(PC pc, Type x, Type y)
526 {
527   PC_HPDDM_Level *ctx;
528   PetscScalar    *out;
529 
530   PetscFunctionBegin;
531   PetscCall(PCShellGetContext(pc, &ctx));
532   /* going from PETSc to HPDDM numbering */
533   PetscCall(VecScatterBegin(ctx->scatter, x, ctx->v[0][0], INSERT_VALUES, SCATTER_FORWARD));
534   PetscCall(VecScatterEnd(ctx->scatter, x, ctx->v[0][0], INSERT_VALUES, SCATTER_FORWARD));
535   PetscCall(VecGetArrayWrite(ctx->v[0][0], &out));
536   ctx->P->deflation<false>(nullptr, out, 1); /* y = Q x */
537   PetscCall(VecRestoreArrayWrite(ctx->v[0][0], &out));
538   /* going from HPDDM to PETSc numbering */
539   PetscCall(VecScatterBegin(ctx->scatter, ctx->v[0][0], y, INSERT_VALUES, SCATTER_REVERSE));
540   PetscCall(VecScatterEnd(ctx->scatter, ctx->v[0][0], y, INSERT_VALUES, SCATTER_REVERSE));
541   PetscFunctionReturn(PETSC_SUCCESS);
542 }
543 
544 template <class Type, typename std::enable_if<std::is_same<Type, Mat>::value>::type * = nullptr>
545 static inline PetscErrorCode PCHPDDMDeflate_Private(PC pc, Type X, Type Y)
546 {
547   PC_HPDDM_Level *ctx;
548   Vec             vX, vY, vC;
549   PetscScalar    *out;
550   PetscInt        i, N;
551 
552   PetscFunctionBegin;
553   PetscCall(PCShellGetContext(pc, &ctx));
554   PetscCall(MatGetSize(X, nullptr, &N));
555   /* going from PETSc to HPDDM numbering */
556   for (i = 0; i < N; ++i) {
557     PetscCall(MatDenseGetColumnVecRead(X, i, &vX));
558     PetscCall(MatDenseGetColumnVecWrite(ctx->V[0], i, &vC));
559     PetscCall(VecScatterBegin(ctx->scatter, vX, vC, INSERT_VALUES, SCATTER_FORWARD));
560     PetscCall(VecScatterEnd(ctx->scatter, vX, vC, INSERT_VALUES, SCATTER_FORWARD));
561     PetscCall(MatDenseRestoreColumnVecWrite(ctx->V[0], i, &vC));
562     PetscCall(MatDenseRestoreColumnVecRead(X, i, &vX));
563   }
564   PetscCall(MatDenseGetArrayWrite(ctx->V[0], &out));
565   ctx->P->deflation<false>(nullptr, out, N); /* Y = Q X */
566   PetscCall(MatDenseRestoreArrayWrite(ctx->V[0], &out));
567   /* going from HPDDM to PETSc numbering */
568   for (i = 0; i < N; ++i) {
569     PetscCall(MatDenseGetColumnVecRead(ctx->V[0], i, &vC));
570     PetscCall(MatDenseGetColumnVecWrite(Y, i, &vY));
571     PetscCall(VecScatterBegin(ctx->scatter, vC, vY, INSERT_VALUES, SCATTER_REVERSE));
572     PetscCall(VecScatterEnd(ctx->scatter, vC, vY, INSERT_VALUES, SCATTER_REVERSE));
573     PetscCall(MatDenseRestoreColumnVecWrite(Y, i, &vY));
574     PetscCall(MatDenseRestoreColumnVecRead(ctx->V[0], i, &vC));
575   }
576   PetscFunctionReturn(PETSC_SUCCESS);
577 }
578 
579 /*
580      PCApply_HPDDMShell - Applies a (2) deflated, (1) additive, or (3) balanced coarse correction. In what follows, E = Z Pmat Z^T and Q = Z^T E^-1 Z.
581 
582 .vb
583    (1) y =                Pmat^-1              x + Q x,
584    (2) y =                Pmat^-1 (I - Amat Q) x + Q x (default),
585    (3) y = (I - Q Amat^T) Pmat^-1 (I - Amat Q) x + Q x.
586 .ve
587 
588    Input Parameters:
589 +     pc - preconditioner context
590 -     x - input vector
591 
592    Output Parameter:
593 .     y - output vector
594 
595    Notes:
596      The options of Pmat^-1 = pc(Pmat) are prefixed by -pc_hpddm_levels_1_pc_. Z is a tall-and-skinny matrix assembled by HPDDM. The number of processes on which (Z Pmat Z^T) is aggregated is set via -pc_hpddm_coarse_p.
597      The options of (Z Pmat Z^T)^-1 = ksp(Z Pmat Z^T) are prefixed by -pc_hpddm_coarse_ (`KSPPREONLY` and `PCCHOLESKY` by default), unless a multilevel correction is turned on, in which case, this function is called recursively at each level except the coarsest one.
598      (1) and (2) visit the "next" level (in terms of coarsening) once per application, while (3) visits it twice, so it is asymptotically twice as costly. (2) is not symmetric even if both Amat and Pmat are symmetric.
599 
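   Example:
   A sketch of command-line options tying these prefixes together (the values are illustrative, not defaults):
.vb
   -pc_type hpddm -pc_hpddm_levels_1_eps_nev 10 -pc_hpddm_levels_1_pc_type asm -pc_hpddm_coarse_p 2 -pc_hpddm_coarse_correction balanced
.ve
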
600    Level: advanced
601 
602    Developer Note:
603    Since this is not an actual manual page the material below should be moved to an appropriate manual page with the appropriate context, i.e. explaining when it is used and how
604    to trigger it. Likely the manual page is `PCHPDDM`
605 
606 .seealso: `PCHPDDM`, `PCHPDDMCoarseCorrectionType`
607 */
608 static PetscErrorCode PCApply_HPDDMShell(PC pc, Vec x, Vec y)
609 {
610   PC_HPDDM_Level *ctx;
611   Mat             A;
612 
613   PetscFunctionBegin;
614   PetscCall(PCShellGetContext(pc, &ctx));
615   PetscCheck(ctx->P, PETSC_COMM_SELF, PETSC_ERR_PLIB, "PCSHELL from PCHPDDM called with no HPDDM object");
616   PetscCall(KSPGetOperators(ctx->ksp, &A, nullptr));
617   PetscCall(PCHPDDMDeflate_Private(pc, x, y)); /* y = Q x                          */
618   if (ctx->parent->correction == PC_HPDDM_COARSE_CORRECTION_DEFLATED || ctx->parent->correction == PC_HPDDM_COARSE_CORRECTION_BALANCED) {
619     if (!ctx->parent->normal || ctx != ctx->parent->levels[0]) PetscCall(MatMult(A, y, ctx->v[1][0])); /* y = A Q x */
620     else { /* KSPLSQR and finest level */ PetscCall(MatMult(A, y, ctx->parent->normal));               /* y = A Q x                        */
621       PetscCall(MatMultHermitianTranspose(A, ctx->parent->normal, ctx->v[1][0]));                      /* y = A^T A Q x    */
622     }
623     PetscCall(VecWAXPY(ctx->v[1][1], -1.0, ctx->v[1][0], x)); /* y = (I - A Q) x                  */
624     PetscCall(PCApply(ctx->pc, ctx->v[1][1], ctx->v[1][0]));  /* y = M^-1 (I - A Q) x             */
625     if (ctx->parent->correction == PC_HPDDM_COARSE_CORRECTION_BALANCED) {
626       if (!ctx->parent->normal || ctx != ctx->parent->levels[0]) PetscCall(MatMultHermitianTranspose(A, ctx->v[1][0], ctx->v[1][1])); /* z = A^T y */
627       else {
628         PetscCall(MatMult(A, ctx->v[1][0], ctx->parent->normal));
629         PetscCall(MatMultHermitianTranspose(A, ctx->parent->normal, ctx->v[1][1])); /* z = A^T A y    */
630       }
631       PetscCall(PCHPDDMDeflate_Private(pc, ctx->v[1][1], ctx->v[1][1]));
632       PetscCall(VecAXPBYPCZ(y, -1.0, 1.0, 1.0, ctx->v[1][1], ctx->v[1][0])); /* y = (I - Q A^T) y + Q x */
633     } else PetscCall(VecAXPY(y, 1.0, ctx->v[1][0]));                         /* y = Q M^-1 (I - A Q) x + Q x     */
634   } else {
635     PetscCheck(ctx->parent->correction == PC_HPDDM_COARSE_CORRECTION_ADDITIVE, PetscObjectComm((PetscObject)pc), PETSC_ERR_PLIB, "PCSHELL from PCHPDDM called with an unknown PCHPDDMCoarseCorrectionType %d", ctx->parent->correction);
636     PetscCall(PCApply(ctx->pc, x, ctx->v[1][0]));
637     PetscCall(VecAXPY(y, 1.0, ctx->v[1][0])); /* y = M^-1 x + Q x                 */
638   }
639   PetscFunctionReturn(PETSC_SUCCESS);
640 }
641 
642 /*
643      PCMatApply_HPDDMShell - Variant of PCApply_HPDDMShell() for blocks of vectors.
644 
645    Input Parameters:
646 +     pc - preconditioner context
647 -     X - block of input vectors
648 
649    Output Parameter:
650 .     Y - block of output vectors
651 
652    Level: advanced
653 
654 .seealso: `PCHPDDM`, `PCApply_HPDDMShell()`, `PCHPDDMCoarseCorrectionType`
655 */
656 static PetscErrorCode PCMatApply_HPDDMShell(PC pc, Mat X, Mat Y)
657 {
658   PC_HPDDM_Level *ctx;
659   Mat             A, *ptr;
660   PetscContainer  container = nullptr;
661   PetscScalar    *array;
662   PetscInt        m, M, N, prev = 0;
663   PetscBool       reset = PETSC_FALSE;
664 
665   PetscFunctionBegin;
666   PetscCall(PCShellGetContext(pc, &ctx));
667   PetscCheck(ctx->P, PETSC_COMM_SELF, PETSC_ERR_PLIB, "PCSHELL from PCHPDDM called with no HPDDM object");
668   PetscCall(MatGetSize(X, nullptr, &N));
669   PetscCall(KSPGetOperators(ctx->ksp, &A, nullptr));
670   PetscCall(PetscObjectQuery((PetscObject)A, "_HPDDM_MatProduct", (PetscObject *)&container));
671   if (container) { /* MatProduct container already attached */
672     PetscCall(PetscContainerGetPointer(container, (void **)&ptr));
673     if (ptr[1] != ctx->V[2]) /* Mat has changed or may have been set first in KSPHPDDM */
674       for (m = 0; m < 2; ++m) {
675         PetscCall(MatDestroy(ctx->V + m + 1));
676         ctx->V[m + 1] = ptr[m];
677         PetscCall(PetscObjectReference((PetscObject)ctx->V[m + 1]));
678       }
679   }
680   if (ctx->V[1]) PetscCall(MatGetSize(ctx->V[1], nullptr, &prev));
681   if (N != prev || !ctx->V[0]) {
682     PetscCall(MatDestroy(ctx->V));
683     PetscCall(VecGetLocalSize(ctx->v[0][0], &m));
684     PetscCall(MatCreateDense(PetscObjectComm((PetscObject)pc), m, PETSC_DECIDE, PETSC_DECIDE, N, nullptr, ctx->V));
685     if (N != prev) {
686       PetscCall(MatDestroy(ctx->V + 1));
687       PetscCall(MatDestroy(ctx->V + 2));
688       PetscCall(MatGetLocalSize(X, &m, nullptr));
689       PetscCall(MatGetSize(X, &M, nullptr));
690       if (ctx->parent->correction != PC_HPDDM_COARSE_CORRECTION_BALANCED) PetscCall(MatDenseGetArrayWrite(ctx->V[0], &array));
691       else array = nullptr;
692       PetscCall(MatCreateDense(PetscObjectComm((PetscObject)pc), m, PETSC_DECIDE, M, N, array, ctx->V + 1));
693       if (ctx->parent->correction != PC_HPDDM_COARSE_CORRECTION_BALANCED) PetscCall(MatDenseRestoreArrayWrite(ctx->V[0], &array));
694       PetscCall(MatCreateDense(PetscObjectComm((PetscObject)pc), m, PETSC_DECIDE, M, N, nullptr, ctx->V + 2));
695       PetscCall(MatProductCreateWithMat(A, Y, nullptr, ctx->V[1]));
696       PetscCall(MatProductSetType(ctx->V[1], MATPRODUCT_AB));
697       PetscCall(MatProductSetFromOptions(ctx->V[1]));
698       PetscCall(MatProductSymbolic(ctx->V[1]));
699       if (!container) { /* no MatProduct container attached, create one to be queried in KSPHPDDM or at the next call to PCMatApply() */
700         PetscCall(PetscContainerCreate(PetscObjectComm((PetscObject)A), &container));
701         PetscCall(PetscObjectCompose((PetscObject)A, "_HPDDM_MatProduct", (PetscObject)container));
702       }
703       PetscCall(PetscContainerSetPointer(container, ctx->V + 1)); /* need to compose B and D from MatProductCreateWithMat(A, B, NULL, D), which are stored in the contiguous array ctx->V */
704     }
705     if (ctx->parent->correction == PC_HPDDM_COARSE_CORRECTION_BALANCED) {
706       PetscCall(MatProductCreateWithMat(A, ctx->V[1], nullptr, ctx->V[2]));
707       PetscCall(MatProductSetType(ctx->V[2], MATPRODUCT_AtB));
708       PetscCall(MatProductSetFromOptions(ctx->V[2]));
709       PetscCall(MatProductSymbolic(ctx->V[2]));
710     }
711     ctx->P->start(N);
712   }
713   if (N == prev || container) { /* when MatProduct container is attached, always need to MatProductReplaceMats() since KSPHPDDM may have replaced the Mat as well */
714     PetscCall(MatProductReplaceMats(nullptr, Y, nullptr, ctx->V[1]));
715     if (container && ctx->parent->correction != PC_HPDDM_COARSE_CORRECTION_BALANCED) {
716       PetscCall(MatDenseGetArrayWrite(ctx->V[0], &array));
717       PetscCall(MatDensePlaceArray(ctx->V[1], array));
718       PetscCall(MatDenseRestoreArrayWrite(ctx->V[0], &array));
719       reset = PETSC_TRUE;
720     }
721   }
722   PetscCall(PCHPDDMDeflate_Private(pc, X, Y));
723   if (ctx->parent->correction == PC_HPDDM_COARSE_CORRECTION_DEFLATED || ctx->parent->correction == PC_HPDDM_COARSE_CORRECTION_BALANCED) {
724     PetscCall(MatProductNumeric(ctx->V[1]));
725     PetscCall(MatCopy(ctx->V[1], ctx->V[2], SAME_NONZERO_PATTERN));
726     PetscCall(MatAXPY(ctx->V[2], -1.0, X, SAME_NONZERO_PATTERN));
727     PetscCall(PCMatApply(ctx->pc, ctx->V[2], ctx->V[1]));
728     if (ctx->parent->correction == PC_HPDDM_COARSE_CORRECTION_BALANCED) {
729       PetscCall(MatProductNumeric(ctx->V[2]));
730       PetscCall(PCHPDDMDeflate_Private(pc, ctx->V[2], ctx->V[2]));
731       PetscCall(MatAXPY(ctx->V[1], -1.0, ctx->V[2], SAME_NONZERO_PATTERN));
732     }
733     PetscCall(MatAXPY(Y, -1.0, ctx->V[1], SAME_NONZERO_PATTERN));
734   } else {
735     PetscCheck(ctx->parent->correction == PC_HPDDM_COARSE_CORRECTION_ADDITIVE, PetscObjectComm((PetscObject)pc), PETSC_ERR_PLIB, "PCSHELL from PCHPDDM called with an unknown PCHPDDMCoarseCorrectionType %d", ctx->parent->correction);
736     PetscCall(PCMatApply(ctx->pc, X, ctx->V[1]));
737     PetscCall(MatAXPY(Y, 1.0, ctx->V[1], SAME_NONZERO_PATTERN));
738   }
739   if (reset) PetscCall(MatDenseResetArray(ctx->V[1]));
740   PetscFunctionReturn(PETSC_SUCCESS);
741 }
742 
743 static PetscErrorCode PCDestroy_HPDDMShell(PC pc)
744 {
745   PC_HPDDM_Level *ctx;
746   PetscContainer  container;
747 
748   PetscFunctionBegin;
749   PetscCall(PCShellGetContext(pc, &ctx));
750   PetscCall(HPDDM::Schwarz<PetscScalar>::destroy(ctx, PETSC_TRUE));
751   PetscCall(VecDestroyVecs(1, &ctx->v[0]));
752   PetscCall(VecDestroyVecs(2, &ctx->v[1]));
753   PetscCall(PetscObjectQuery((PetscObject)(ctx->pc)->mat, "_HPDDM_MatProduct", (PetscObject *)&container));
754   PetscCall(PetscContainerDestroy(&container));
755   PetscCall(PetscObjectCompose((PetscObject)(ctx->pc)->mat, "_HPDDM_MatProduct", nullptr));
756   PetscCall(MatDestroy(ctx->V));
757   PetscCall(MatDestroy(ctx->V + 1));
758   PetscCall(MatDestroy(ctx->V + 2));
759   PetscCall(VecDestroy(&ctx->D));
760   PetscCall(VecScatterDestroy(&ctx->scatter));
761   PetscCall(PCDestroy(&ctx->pc));
762   PetscFunctionReturn(PETSC_SUCCESS);
763 }
764 
765 static PetscErrorCode PCHPDDMSolve_Private(const PC_HPDDM_Level *ctx, PetscScalar *rhs, const unsigned short &mu)
766 {
767   Mat      B, X;
768   PetscInt n, N, j = 0;
769 
770   PetscFunctionBegin;
771   PetscCall(KSPGetOperators(ctx->ksp, &B, nullptr));
772   PetscCall(MatGetLocalSize(B, &n, nullptr));
773   PetscCall(MatGetSize(B, &N, nullptr));
774   if (ctx->parent->log_separate) {
775     j = std::distance(ctx->parent->levels, std::find(ctx->parent->levels, ctx->parent->levels + ctx->parent->N, ctx));
776     PetscCall(PetscLogEventBegin(PC_HPDDM_Solve[j], ctx->ksp, nullptr, nullptr, nullptr));
777   }
778   if (mu == 1) {
779     if (!ctx->ksp->vec_rhs) {
780       PetscCall(VecCreateMPIWithArray(PetscObjectComm((PetscObject)ctx->ksp), 1, n, N, nullptr, &ctx->ksp->vec_rhs));
781       PetscCall(VecCreateMPI(PetscObjectComm((PetscObject)ctx->ksp), n, N, &ctx->ksp->vec_sol));
782     }
783     PetscCall(VecPlaceArray(ctx->ksp->vec_rhs, rhs));
784     PetscCall(KSPSolve(ctx->ksp, nullptr, nullptr));
785     PetscCall(VecCopy(ctx->ksp->vec_sol, ctx->ksp->vec_rhs));
786     PetscCall(VecResetArray(ctx->ksp->vec_rhs));
787   } else {
788     PetscCall(MatCreateDense(PetscObjectComm((PetscObject)ctx->ksp), n, PETSC_DECIDE, N, mu, rhs, &B));
789     PetscCall(MatCreateDense(PetscObjectComm((PetscObject)ctx->ksp), n, PETSC_DECIDE, N, mu, nullptr, &X));
790     PetscCall(KSPMatSolve(ctx->ksp, B, X));
791     PetscCall(MatCopy(X, B, SAME_NONZERO_PATTERN));
792     PetscCall(MatDestroy(&X));
793     PetscCall(MatDestroy(&B));
794   }
795   if (ctx->parent->log_separate) PetscCall(PetscLogEventEnd(PC_HPDDM_Solve[j], ctx->ksp, nullptr, nullptr, nullptr));
796   PetscFunctionReturn(PETSC_SUCCESS);
797 }
798 
799 static PetscErrorCode PCHPDDMSetUpNeumannOverlap_Private(PC pc)
800 {
801   PC_HPDDM *data = (PC_HPDDM *)pc->data;
802 
803   PetscFunctionBegin;
804   if (data->setup) {
805     Mat       P;
806     Vec       x, xt = nullptr;
807     PetscReal t = 0.0, s = 0.0;
808 
809     PetscCall(PCGetOperators(pc, nullptr, &P));
810     PetscCall(PetscObjectQuery((PetscObject)P, "__SNES_latest_X", (PetscObject *)&x));
811     PetscCallBack("PCHPDDM Neumann callback", (*data->setup)(data->aux, t, x, xt, s, data->is, data->setup_ctx));
812   }
813   PetscFunctionReturn(PETSC_SUCCESS);
814 }
815 
816 static PetscErrorCode PCHPDDMCreateSubMatrices_Private(Mat mat, PetscInt n, const IS *, const IS *, MatReuse scall, Mat *submat[])
817 {
818   Mat A;
819 
820   PetscFunctionBegin;
821   PetscCheck(n == 1, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MatCreateSubMatrices() called to extract %" PetscInt_FMT " submatrices, which is different than 1", n);
822   /* previously composed Mat */
823   PetscCall(PetscObjectQuery((PetscObject)mat, "_PCHPDDM_SubMatrices", (PetscObject *)&A));
824   PetscCheck(A, PETSC_COMM_SELF, PETSC_ERR_PLIB, "SubMatrices not found in Mat");
825   if (scall == MAT_INITIAL_MATRIX) {
826     PetscCall(PetscCalloc1(2, submat)); /* allocate an extra Mat to avoid errors in MatDestroySubMatrices_Dummy() */
827     PetscCall(MatDuplicate(A, MAT_COPY_VALUES, *submat));
828   } else PetscCall(MatCopy(A, (*submat)[0], SAME_NONZERO_PATTERN));
829   PetscFunctionReturn(PETSC_SUCCESS);
830 }
831 
832 static PetscErrorCode PCHPDDMCommunicationAvoidingPCASM_Private(PC pc, Mat C, PetscBool sorted)
833 {
834   void (*op)(void);
835 
836   PetscFunctionBegin;
837   /* previously-composed Mat */
838   PetscCall(PetscObjectCompose((PetscObject)pc->pmat, "_PCHPDDM_SubMatrices", (PetscObject)C));
839   PetscCall(MatGetOperation(pc->pmat, MATOP_CREATE_SUBMATRICES, &op));
840   /* trick suggested by Barry https://lists.mcs.anl.gov/pipermail/petsc-dev/2020-January/025491.html */
841   PetscCall(MatSetOperation(pc->pmat, MATOP_CREATE_SUBMATRICES, (void (*)(void))PCHPDDMCreateSubMatrices_Private));
842   if (sorted) PetscCall(PCASMSetSortIndices(pc, PETSC_FALSE)); /* everything is already sorted */
843   PetscCall(PCSetFromOptions(pc));                             /* otherwise -pc_hpddm_levels_1_pc_asm_sub_mat_type is not used */
844   PetscCall(PCSetUp(pc));
845   /* reset MatCreateSubMatrices() */
846   PetscCall(MatSetOperation(pc->pmat, MATOP_CREATE_SUBMATRICES, op));
847   PetscCall(PetscObjectCompose((PetscObject)pc->pmat, "_PCHPDDM_SubMatrices", nullptr));
848   PetscFunctionReturn(PETSC_SUCCESS);
849 }
850 
851 static PetscErrorCode PCHPDDMPermute_Private(IS is, IS in_is, IS *out_is, Mat in_C, Mat *out_C, IS *p)
852 {
853   IS                           perm;
854   const PetscInt              *ptr;
855   PetscInt                    *concatenate, size, n, bs;
856   std::map<PetscInt, PetscInt> order;
857   PetscBool                    sorted;
858 
859   PetscFunctionBegin;
860   PetscCall(ISSorted(is, &sorted));
861   if (!sorted) {
862     PetscCall(ISGetLocalSize(is, &size));
863     PetscCall(ISGetIndices(is, &ptr));
864     PetscCall(ISGetBlockSize(is, &bs));
865     /* MatCreateSubMatrices(), called by PCASM, follows the global numbering of Pmat */
866     for (n = 0; n < size; n += bs) order.insert(std::make_pair(ptr[n] / bs, n / bs));
867     PetscCall(ISRestoreIndices(is, &ptr));
868     size /= bs;
869     if (out_C) {
870       PetscCall(PetscMalloc1(size, &concatenate));
871       for (const std::pair<const PetscInt, PetscInt> &i : order) *concatenate++ = i.second;
872       concatenate -= size;
873       PetscCall(ISCreateBlock(PetscObjectComm((PetscObject)in_C), bs, size, concatenate, PETSC_OWN_POINTER, &perm));
874       PetscCall(ISSetPermutation(perm));
875       /* permute user-provided Mat so that it matches with MatCreateSubMatrices() numbering */
876       PetscCall(MatPermute(in_C, perm, perm, out_C));
877       if (p) *p = perm;
878       else PetscCall(ISDestroy(&perm)); /* no need to save the permutation */
879     }
880     if (out_is) {
881       PetscCall(PetscMalloc1(size, &concatenate));
882       for (const std::pair<const PetscInt, PetscInt> &i : order) *concatenate++ = i.first;
883       concatenate -= size;
884       /* permute user-provided IS so that it matches with MatCreateSubMatrices() numbering */
885       PetscCall(ISCreateBlock(PetscObjectComm((PetscObject)in_is), bs, size, concatenate, PETSC_OWN_POINTER, out_is));
886     }
887   } else { /* input IS is sorted, nothing to permute, simply duplicate inputs when needed */
888     if (out_C) PetscCall(MatDuplicate(in_C, MAT_COPY_VALUES, out_C));
889     if (out_is) PetscCall(ISDuplicate(in_is, out_is));
890   }
891   PetscFunctionReturn(PETSC_SUCCESS);
892 }
893 
894 static PetscErrorCode PCHPDDMDestroySubMatrices_Private(PetscBool flg, PetscBool algebraic, Mat *sub)
895 {
896   IS is;
897 
898   PetscFunctionBegin;
899   if (!flg) {
900     if (algebraic) {
901       PetscCall(PetscObjectQuery((PetscObject)sub[0], "_PCHPDDM_Embed", (PetscObject *)&is));
902       PetscCall(ISDestroy(&is));
903       PetscCall(PetscObjectCompose((PetscObject)sub[0], "_PCHPDDM_Embed", nullptr));
904       PetscCall(PetscObjectCompose((PetscObject)sub[0], "_PCHPDDM_Compact", nullptr));
905     }
906     PetscCall(MatDestroySubMatrices(algebraic ? 2 : 1, &sub));
907   }
908   PetscFunctionReturn(PETSC_SUCCESS);
909 }
910 
911 static PetscErrorCode PCHPDDMAlgebraicAuxiliaryMat_Private(Mat P, IS *is, Mat *sub[], PetscBool block)
912 {
913   IS         icol[3], irow[2];
914   Mat       *M, Q;
915   PetscReal *ptr;
916   PetscInt  *idx, p = 0, n, bs = PetscAbs(P->cmap->bs);
917   PetscBool  flg;
918 
919   PetscFunctionBegin;
920   PetscCall(ISCreateStride(PETSC_COMM_SELF, P->cmap->N, 0, 1, icol + 2));
921   PetscCall(ISSetBlockSize(icol[2], bs));
922   PetscCall(ISSetIdentity(icol[2]));
923   PetscCall(PetscObjectTypeCompare((PetscObject)P, MATMPISBAIJ, &flg));
924   if (flg) {
925     /* MatCreateSubMatrices() does not handle MATMPISBAIJ properly when iscol != isrow, so convert first to MATMPIBAIJ */
926     PetscCall(MatConvert(P, MATMPIBAIJ, MAT_INITIAL_MATRIX, &Q));
927     std::swap(P, Q);
928   }
929   PetscCall(MatCreateSubMatrices(P, 1, is, icol + 2, MAT_INITIAL_MATRIX, &M));
930   if (flg) {
931     std::swap(P, Q);
932     PetscCall(MatDestroy(&Q));
933   }
934   PetscCall(ISDestroy(icol + 2));
935   PetscCall(ISCreateStride(PETSC_COMM_SELF, M[0]->rmap->N, 0, 1, irow));
936   PetscCall(ISSetBlockSize(irow[0], bs));
937   PetscCall(ISSetIdentity(irow[0]));
938   if (!block) {
939     PetscCall(PetscMalloc2(P->cmap->N, &ptr, P->cmap->N / bs, &idx));
940     PetscCall(MatGetColumnNorms(M[0], NORM_INFINITY, ptr));
941     /* check for nonzero columns so that M[0] may be expressed in compact form */
942     for (n = 0; n < P->cmap->N; n += bs) {
943       if (std::find_if(ptr + n, ptr + n + bs, [](PetscReal v) { return v > PETSC_MACHINE_EPSILON; }) != ptr + n + bs) idx[p++] = n / bs;
944     }
945     PetscCall(ISCreateBlock(PETSC_COMM_SELF, bs, p, idx, PETSC_USE_POINTER, icol + 1));
946     PetscCall(ISSetInfo(icol[1], IS_SORTED, IS_GLOBAL, PETSC_TRUE, PETSC_TRUE));
947     PetscCall(ISEmbed(*is, icol[1], PETSC_FALSE, icol + 2));
948     irow[1] = irow[0];
949     /* first Mat will be used in PCASM (if it is used as a PC on this level) and as the left-hand side of GenEO */
950     icol[0] = is[0];
951     PetscCall(MatCreateSubMatrices(M[0], 2, irow, icol, MAT_INITIAL_MATRIX, sub));
952     PetscCall(ISDestroy(icol + 1));
953     PetscCall(PetscFree2(ptr, idx));
954     /* IS used to go back and forth between the augmented and the original local linear system, see eq. (3.4) of [2022b] */
955     PetscCall(PetscObjectCompose((PetscObject)(*sub)[0], "_PCHPDDM_Embed", (PetscObject)icol[2]));
956     /* Mat used in eq. (3.1) of [2022b] */
957     PetscCall(PetscObjectCompose((PetscObject)(*sub)[0], "_PCHPDDM_Compact", (PetscObject)(*sub)[1]));
958   } else {
959     Mat aux;
960     PetscCall(MatSetOption(M[0], MAT_SUBMAT_SINGLEIS, PETSC_TRUE));
961     /* diagonal block of the overlapping rows */
962     PetscCall(MatCreateSubMatrices(M[0], 1, irow, is, MAT_INITIAL_MATRIX, sub));
963     PetscCall(MatDuplicate((*sub)[0], MAT_COPY_VALUES, &aux));
964     PetscCall(MatSetOption(aux, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE));
965     if (bs == 1) { /* scalar case */
966       Vec sum[2];
967       PetscCall(MatCreateVecs(aux, sum, sum + 1));
968       PetscCall(MatGetRowSum(M[0], sum[0]));
969       PetscCall(MatGetRowSum(aux, sum[1]));
970       /* off-diagonal block row sum (full rows - diagonal block rows) */
971       PetscCall(VecAXPY(sum[0], -1.0, sum[1]));
972       /* subdomain matrix plus off-diagonal block row sum */
973       PetscCall(MatDiagonalSet(aux, sum[0], ADD_VALUES));
974       PetscCall(VecDestroy(sum));
975       PetscCall(VecDestroy(sum + 1));
976     } else { /* vectorial case */
977       /* TODO: missing MatGetValuesBlocked(), so the code below is     */
978       /* an extension of the scalar case for when bs > 1, but it could */
979       /* be more efficient by avoiding all these MatMatMult()          */
980       Mat          sum[2], ones;
981       PetscScalar *ptr;
982       PetscCall(PetscCalloc1(M[0]->cmap->n * bs, &ptr));
983       PetscCall(MatCreateDense(PETSC_COMM_SELF, M[0]->cmap->n, bs, M[0]->cmap->n, bs, ptr, &ones));
984       for (n = 0; n < M[0]->cmap->n; n += bs) {
985         for (p = 0; p < bs; ++p) ptr[n + p * (M[0]->cmap->n + 1)] = 1.0;
986       }
987       PetscCall(MatMatMult(M[0], ones, MAT_INITIAL_MATRIX, PETSC_DEFAULT, sum));
988       PetscCall(MatDestroy(&ones));
989       PetscCall(MatCreateDense(PETSC_COMM_SELF, aux->cmap->n, bs, aux->cmap->n, bs, ptr, &ones));
990       PetscCall(MatDenseSetLDA(ones, M[0]->cmap->n));
991       PetscCall(MatMatMult(aux, ones, MAT_INITIAL_MATRIX, PETSC_DEFAULT, sum + 1));
992       PetscCall(MatDestroy(&ones));
993       PetscCall(PetscFree(ptr));
994       /* off-diagonal block row sum (full rows - diagonal block rows) */
995       PetscCall(MatAXPY(sum[0], -1.0, sum[1], SAME_NONZERO_PATTERN));
996       PetscCall(MatDestroy(sum + 1));
997       /* re-order values to be consistent with MatSetValuesBlocked()           */
998       /* equivalent to MatTranspose() which does not truly handle              */
999       /* MAT_INPLACE_MATRIX in the rectangular case, as it calls PetscMalloc() */
1000       PetscCall(MatDenseGetArrayWrite(sum[0], &ptr));
1001       HPDDM::Wrapper<PetscScalar>::imatcopy<'T'>(bs, sum[0]->rmap->n, ptr, sum[0]->rmap->n, bs);
1002       /* subdomain matrix plus off-diagonal block row sum */
1003       for (n = 0; n < aux->cmap->n / bs; ++n) PetscCall(MatSetValuesBlocked(aux, 1, &n, 1, &n, ptr + n * bs * bs, ADD_VALUES));
1004       PetscCall(MatAssemblyBegin(aux, MAT_FINAL_ASSEMBLY));
1005       PetscCall(MatAssemblyEnd(aux, MAT_FINAL_ASSEMBLY));
1006       PetscCall(MatDenseRestoreArrayWrite(sum[0], &ptr));
1007       PetscCall(MatDestroy(sum));
1008     }
1009     PetscCall(MatSetOption(aux, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_TRUE));
1010     /* left-hand side of GenEO, with the same sparsity pattern as PCASM subdomain solvers  */
1011     PetscCall(PetscObjectCompose((PetscObject)(*sub)[0], "_PCHPDDM_Neumann_Mat", (PetscObject)aux));
1012   }
1013   PetscCall(ISDestroy(irow));
1014   PetscCall(MatDestroySubMatrices(1, &M));
1015   PetscFunctionReturn(PETSC_SUCCESS);
1016 }
1017 
1018 static PetscErrorCode PCSetUp_HPDDM(PC pc)
1019 {
1020   PC_HPDDM                 *data = (PC_HPDDM *)pc->data;
1021   PC                        inner;
1022   KSP                      *ksp;
1023   Mat                      *sub, A, P, N, C = nullptr, uaux = nullptr, weighted, subA[2];
1024   Vec                       xin, v;
1025   std::vector<Vec>          initial;
1026   IS                        is[1], loc, uis = data->is, unsorted = nullptr;
1027   ISLocalToGlobalMapping    l2g;
1028   char                      prefix[256];
1029   const char               *pcpre;
1030   const PetscScalar *const *ev;
1031   PetscInt                  n, requested = data->N, reused = 0;
1032   MatStructure              structure  = UNKNOWN_NONZERO_PATTERN;
1033   PetscBool                 subdomains = PETSC_FALSE, flg = PETSC_FALSE, ismatis, swap = PETSC_FALSE, algebraic = PETSC_FALSE, block = PETSC_FALSE;
1034   DM                        dm;
1035 #if PetscDefined(USE_DEBUG)
1036   IS  dis  = nullptr;
1037   Mat daux = nullptr;
1038 #endif
1039 
1040   PetscFunctionBegin;
1041   PetscCheck(data->levels && data->levels[0], PETSC_COMM_SELF, PETSC_ERR_PLIB, "Not a single level allocated");
1042   PetscCall(PCGetOptionsPrefix(pc, &pcpre));
1043   PetscCall(PCGetOperators(pc, &A, &P));
1044   if (!data->levels[0]->ksp) {
1045     PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &data->levels[0]->ksp));
1046     PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%spc_hpddm_%s_", pcpre ? pcpre : "", data->N > 1 ? "levels_1" : "coarse"));
1047     PetscCall(KSPSetOptionsPrefix(data->levels[0]->ksp, prefix));
1048     PetscCall(KSPSetType(data->levels[0]->ksp, KSPPREONLY));
1049   } else if (data->levels[0]->ksp->pc && data->levels[0]->ksp->pc->setupcalled == 1 && data->levels[0]->ksp->pc->reusepreconditioner) {
1050     /* if the fine-level PCSHELL exists, its setup has succeeded, and one wants to reuse it, */
1051     /* then just propagate the appropriate flag to the coarser levels                        */
1052     for (n = 0; n < PETSC_PCHPDDM_MAXLEVELS && data->levels[n]; ++n) {
1053       /* the following KSP and PC may be NULL for some processes, hence the check            */
1054       if (data->levels[n]->ksp) PetscCall(KSPSetReusePreconditioner(data->levels[n]->ksp, PETSC_TRUE));
1055       if (data->levels[n]->pc) PetscCall(PCSetReusePreconditioner(data->levels[n]->pc, PETSC_TRUE));
1056     }
1057     /* early bail out because there is nothing to do */
1058     PetscFunctionReturn(PETSC_SUCCESS);
1059   } else {
1060     /* reset coarser levels */
1061     for (n = 1; n < PETSC_PCHPDDM_MAXLEVELS && data->levels[n]; ++n) {
1062       if (data->levels[n]->ksp && data->levels[n]->ksp->pc && data->levels[n]->ksp->pc->setupcalled == 1 && data->levels[n]->ksp->pc->reusepreconditioner && n < data->N) {
1063         reused = data->N - n;
1064         break;
1065       }
1066       PetscCall(KSPDestroy(&data->levels[n]->ksp));
1067       PetscCall(PCDestroy(&data->levels[n]->pc));
1068     }
1069     /* check if some coarser levels are being reused */
1070     PetscCall(MPIU_Allreduce(MPI_IN_PLACE, &reused, 1, MPIU_INT, MPI_MAX, PetscObjectComm((PetscObject)pc)));
1071     const int *addr = data->levels[0]->P ? data->levels[0]->P->getAddrLocal() : &HPDDM::i__0;
1072 
1073     if (addr != &HPDDM::i__0 && reused != data->N - 1) {
1074       /* reuse previously computed eigenvectors */
1075       ev = data->levels[0]->P->getVectors();
1076       if (ev) {
1077         initial.reserve(*addr);
1078         PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, data->levels[0]->P->getDof(), ev[0], &xin));
1079         for (n = 0; n < *addr; ++n) {
1080           PetscCall(VecDuplicate(xin, &v));
1081           PetscCall(VecPlaceArray(xin, ev[n]));
1082           PetscCall(VecCopy(xin, v));
1083           initial.emplace_back(v);
1084           PetscCall(VecResetArray(xin));
1085         }
1086         PetscCall(VecDestroy(&xin));
1087       }
1088     }
1089   }
1090   data->N -= reused;
1091   PetscCall(KSPSetOperators(data->levels[0]->ksp, A, P));
1092 
1093   PetscCall(PetscObjectTypeCompare((PetscObject)P, MATIS, &ismatis));
1094   if (!data->is && !ismatis) {
1095     PetscErrorCode (*create)(DM, IS *, Mat *, PetscErrorCode (**)(Mat, PetscReal, Vec, Vec, PetscReal, IS, void *), void **) = nullptr;
1096     PetscErrorCode (*usetup)(Mat, PetscReal, Vec, Vec, PetscReal, IS, void *)                                                = nullptr;
1097     void *uctx                                                                                                               = nullptr;
1098 
1099     /* first see if we can get the data from the DM */
1100     PetscCall(MatGetDM(P, &dm));
1101     if (!dm) PetscCall(MatGetDM(A, &dm));
1102     if (!dm) PetscCall(PCGetDM(pc, &dm));
1103     if (dm) { /* this is the hook for DMPLEX and DMDA for which the auxiliary Mat is the local Neumann matrix */
1104       PetscCall(PetscObjectQueryFunction((PetscObject)dm, "DMCreateNeumannOverlap_C", &create));
1105       if (create) {
1106         PetscCall((*create)(dm, &uis, &uaux, &usetup, &uctx));
1107         if (data->Neumann == PETSC_BOOL3_UNKNOWN) data->Neumann = PETSC_BOOL3_TRUE; /* set the value only if it was not already provided by the user */
1108       }
1109     }
1110     if (!create) {
1111       if (!uis) {
1112         PetscCall(PetscObjectQuery((PetscObject)pc, "_PCHPDDM_Neumann_IS", (PetscObject *)&uis));
1113         PetscCall(PetscObjectReference((PetscObject)uis));
1114       }
1115       if (!uaux) {
1116         PetscCall(PetscObjectQuery((PetscObject)pc, "_PCHPDDM_Neumann_Mat", (PetscObject *)&uaux));
1117         PetscCall(PetscObjectReference((PetscObject)uaux));
1118       }
1119       /* look inside the Pmat instead of the PC, needed for MatSchurComplementComputeExplicitOperator() */
1120       if (!uis) {
1121         PetscCall(PetscObjectQuery((PetscObject)P, "_PCHPDDM_Neumann_IS", (PetscObject *)&uis));
1122         PetscCall(PetscObjectReference((PetscObject)uis));
1123       }
1124       if (!uaux) {
1125         PetscCall(PetscObjectQuery((PetscObject)P, "_PCHPDDM_Neumann_Mat", (PetscObject *)&uaux));
1126         PetscCall(PetscObjectReference((PetscObject)uaux));
1127       }
1128     }
1129     PetscCall(PCHPDDMSetAuxiliaryMat(pc, uis, uaux, usetup, uctx));
1130     PetscCall(MatDestroy(&uaux));
1131     PetscCall(ISDestroy(&uis));
1132   }
1133 
1134   if (!ismatis) {
1135     PetscCall(PCHPDDMSetUpNeumannOverlap_Private(pc));
1136     PetscCall(PetscOptionsGetBool(nullptr, pcpre, "-pc_hpddm_block_splitting", &block, nullptr));
1137     if (data->is && block) {
1138       PetscCall(ISDestroy(&data->is));
1139       PetscCall(MatDestroy(&data->aux));
1140     }
1141     if (!data->is && data->N > 1) {
1142       char type[256] = {}; /* same size as in src/ksp/pc/interface/pcset.c */
1143       PetscCall(PetscObjectTypeCompareAny((PetscObject)P, &flg, MATNORMAL, MATNORMALHERMITIAN, ""));
1144       if (flg || (A->rmap->N != A->cmap->N && P->rmap->N == P->cmap->N && P->rmap->N == A->cmap->N)) {
1145         Mat B;
1146         PetscCall(PCHPDDMSetAuxiliaryMatNormal_Private(pc, A, P, &B, pcpre));
1147         if (data->correction == PC_HPDDM_COARSE_CORRECTION_DEFLATED) data->correction = PC_HPDDM_COARSE_CORRECTION_BALANCED;
1148         PetscCall(MatDestroy(&B));
1149       } else {
1150         PetscCall(PetscObjectTypeCompare((PetscObject)P, MATSCHURCOMPLEMENT, &flg));
1151         if (flg) {
1152           Mat                        A00, P00, A01, A10, A11, B, N;
1153           const PetscScalar         *array;
1154           PetscReal                  norm;
1155           MatSchurComplementAinvType type;
1156 
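          /* the Pmat is a MatSchurComplement S = A11 - A10 inv(A00) A01, with A11 either NULL or checked below to be    */
          /* (numerically) zero; since S cannot be used directly to build the overlapping subdomains, it is replaced     */
          /* further down by the explicitly assembled approximation -A01^H inv(diag(P00)) A01, cf. the calls to          */
          /* MatCreateNormalHermitian() and PCSetOperators() at the end of this branch                                   */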
1157           PetscCall(MatSchurComplementGetSubMatrices(P, &A00, &P00, &A01, &A10, &A11));
1158           if (A11) {
1159             PetscCall(MatNorm(A11, NORM_INFINITY, &norm));
1160             PetscCheck(norm < PETSC_SMALL, PetscObjectComm((PetscObject)P), PETSC_ERR_SUP, "Nonzero A11 block");
1161           }
1162           if (PetscDefined(USE_DEBUG)) {
1163             Mat T, U = nullptr;
1164             IS  z;
1165             PetscCall(PetscObjectTypeCompare((PetscObject)A10, MATTRANSPOSEVIRTUAL, &flg));
1166             if (flg) PetscCall(MatTransposeGetMat(A10, &U));
1167             else {
1168               PetscCall(PetscObjectTypeCompare((PetscObject)A10, MATHERMITIANTRANSPOSEVIRTUAL, &flg));
1169               if (flg) PetscCall(MatHermitianTransposeGetMat(A10, &U));
1170             }
1171             if (U) PetscCall(MatDuplicate(U, MAT_COPY_VALUES, &T));
1172             else PetscCall(MatHermitianTranspose(A10, MAT_INITIAL_MATRIX, &T));
1173             PetscCall(PetscLayoutCompare(T->rmap, A01->rmap, &flg));
1174             if (flg) {
1175               PetscCall(PetscLayoutCompare(T->cmap, A01->cmap, &flg));
1176               if (flg) {
1177                 PetscCall(MatFindZeroRows(A01, &z)); /* for essential boundary conditions, some implementations will */
1178                 if (z) {                             /*  zero rows in [P00 A01] except for the diagonal of P00       */
1179                   PetscCall(MatSetOption(T, MAT_NO_OFF_PROC_ZERO_ROWS, PETSC_TRUE));
1180                   PetscCall(MatZeroRowsIS(T, z, 0.0, nullptr, nullptr)); /* corresponding zero rows from A01 */
1181                   PetscCall(ISDestroy(&z));
1182                 }
1183                 PetscCall(MatMultEqual(A01, T, 10, &flg));
1184                 PetscCheck(flg, PetscObjectComm((PetscObject)P), PETSC_ERR_SUP, "A01 != A10^T");
1185               } else PetscCall(PetscInfo(pc, "A01 and A10^T have non-congruent column layouts, cannot test for equality\n"));
1186             }
1187             PetscCall(MatDestroy(&T));
1188           }
1189           PetscCall(MatCreateVecs(P00, &v, nullptr));
1190           PetscCall(MatSchurComplementGetAinvType(P, &type));
1191           PetscCheck(type == MAT_SCHUR_COMPLEMENT_AINV_DIAG || type == MAT_SCHUR_COMPLEMENT_AINV_LUMP, PetscObjectComm((PetscObject)P), PETSC_ERR_SUP, "-%smat_schur_complement_ainv_type %s", ((PetscObject)P)->prefix ? ((PetscObject)P)->prefix : "", MatSchurComplementAinvTypes[type]);
1192           if (type == MAT_SCHUR_COMPLEMENT_AINV_LUMP) {
1193             PetscCall(MatGetRowSum(P00, v));
1194             if (A00 == P00) PetscCall(PetscObjectReference((PetscObject)A00));
1195             PetscCall(MatDestroy(&P00));
1196             PetscCall(VecGetArrayRead(v, &array));
1197             PetscCall(MatCreateAIJ(PetscObjectComm((PetscObject)A00), A00->rmap->n, A00->cmap->n, A00->rmap->N, A00->cmap->N, 1, nullptr, 0, nullptr, &P00));
1198             PetscCall(MatSetOption(P00, MAT_NO_OFF_PROC_ENTRIES, PETSC_TRUE));
1199             for (n = A00->rmap->rstart; n < A00->rmap->rend; ++n) PetscCall(MatSetValue(P00, n, n, array[n - A00->rmap->rstart], INSERT_VALUES));
1200             PetscCall(MatAssemblyBegin(P00, MAT_FINAL_ASSEMBLY));
1201             PetscCall(MatAssemblyEnd(P00, MAT_FINAL_ASSEMBLY));
1202             PetscCall(VecRestoreArrayRead(v, &array));
1203             PetscCall(MatSchurComplementUpdateSubMatrices(P, A00, P00, A01, A10, A11)); /* replace P00 by diag(sum of each row of P00) */
1204             PetscCall(MatDestroy(&P00));
1205           } else PetscCall(MatGetDiagonal(P00, v));
1206           PetscCall(VecReciprocal(v)); /* inv(diag(P00))       */
1207           PetscCall(VecSqrtAbs(v));    /* sqrt(inv(diag(P00))) */
1208           PetscCall(MatDuplicate(A01, MAT_COPY_VALUES, &B));
1209           PetscCall(MatDiagonalScale(B, v, nullptr));
1210           PetscCall(VecDestroy(&v));
1211           PetscCall(MatCreateNormalHermitian(B, &N));
1212           PetscCall(PCHPDDMSetAuxiliaryMatNormal_Private(pc, B, N, &P, pcpre));
1213           PetscCall(PetscObjectTypeCompare((PetscObject)data->aux, MATSEQAIJ, &flg));
1214           if (!flg) {
1215             PetscCall(MatDestroy(&P));
1216             P = N;
1217             PetscCall(PetscObjectReference((PetscObject)P));
1218           } else PetscCall(MatScale(P, -1.0));
1219           PetscCall(MatScale(N, -1.0));
1220           PetscCall(PCSetOperators(pc, N, P)); /* replace P by -A01' inv(diag(P00)) A01 */
1221           PetscCall(MatDestroy(&N));
1222           PetscCall(MatDestroy(&P));
1223           PetscCall(MatDestroy(&B));
1224           PetscFunctionReturn(PETSC_SUCCESS);
1225         } else {
1226           PetscCall(PetscOptionsGetString(nullptr, pcpre, "-pc_hpddm_levels_1_st_pc_type", type, sizeof(type), nullptr));
1227           PetscCall(PetscStrcmp(type, PCMAT, &algebraic));
1228           PetscCall(PetscOptionsGetBool(nullptr, pcpre, "-pc_hpddm_block_splitting", &block, nullptr));
1229           PetscCheck(!algebraic || !block, PetscObjectComm((PetscObject)P), PETSC_ERR_ARG_INCOMP, "-pc_hpddm_levels_1_st_pc_type mat and -pc_hpddm_block_splitting");
1230           if (block) algebraic = PETSC_TRUE;
1231           if (algebraic) {
1232             PetscCall(ISCreateStride(PETSC_COMM_SELF, P->rmap->n, P->rmap->rstart, 1, &data->is));
1233             PetscCall(MatIncreaseOverlap(P, 1, &data->is, 1));
1234             PetscCall(ISSort(data->is));
1235           } else PetscCall(PetscInfo(pc, "Cannot assemble a fully-algebraic coarse operator with an assembled Pmat and -%spc_hpddm_levels_1_st_pc_type != mat and -%spc_hpddm_block_splitting != true\n", pcpre ? pcpre : "", pcpre ? pcpre : ""));
1236         }
1237       }
1238     }
1239   }
1240 #if PetscDefined(USE_DEBUG)
1241   if (data->is) PetscCall(ISDuplicate(data->is, &dis));
1242   if (data->aux) PetscCall(MatDuplicate(data->aux, MAT_COPY_VALUES, &daux));
1243 #endif
1244   if (data->is || (ismatis && data->N > 1)) {
1245     if (ismatis) {
1246       std::initializer_list<std::string> list = {MATSEQBAIJ, MATSEQSBAIJ};
1247       PetscCall(MatISGetLocalMat(P, &N));
1248       std::initializer_list<std::string>::const_iterator it = std::find(list.begin(), list.end(), ((PetscObject)N)->type_name);
1249       PetscCall(MatISRestoreLocalMat(P, &N));
1250       switch (std::distance(list.begin(), it)) {
1251       case 0:
1252         PetscCall(MatConvert(P, MATMPIBAIJ, MAT_INITIAL_MATRIX, &C));
1253         break;
1254       case 1:
1255         /* MatCreateSubMatrices() does not work with MATSBAIJ and unsorted ISes, so convert to MPIBAIJ */
1256         PetscCall(MatConvert(P, MATMPIBAIJ, MAT_INITIAL_MATRIX, &C));
1257         PetscCall(MatSetOption(C, MAT_SYMMETRIC, PETSC_TRUE));
1258         break;
1259       default:
1260         PetscCall(MatConvert(P, MATMPIAIJ, MAT_INITIAL_MATRIX, &C));
1261       }
1262       PetscCall(MatISGetLocalToGlobalMapping(P, &l2g, nullptr));
1263       PetscCall(PetscObjectReference((PetscObject)P));
1264       PetscCall(KSPSetOperators(data->levels[0]->ksp, A, C));
1265       std::swap(C, P);
1266       PetscCall(ISLocalToGlobalMappingGetSize(l2g, &n));
1267       PetscCall(ISCreateStride(PETSC_COMM_SELF, n, 0, 1, &loc));
1268       PetscCall(ISLocalToGlobalMappingApplyIS(l2g, loc, &is[0]));
1269       PetscCall(ISDestroy(&loc));
1270       /* the auxiliary Mat is _not_ the local Neumann matrix                                */
1271       /* it is the local Neumann matrix augmented (with zeros) through MatIncreaseOverlap() */
1272       data->Neumann = PETSC_BOOL3_FALSE;
1273       structure     = SAME_NONZERO_PATTERN;
1274     } else {
1275       is[0] = data->is;
1276       if (algebraic) subdomains = PETSC_TRUE;
1277       PetscCall(PetscOptionsGetBool(nullptr, pcpre, "-pc_hpddm_define_subdomains", &subdomains, nullptr));
1278       if (PetscBool3ToBool(data->Neumann)) {
1279         PetscCheck(!block, PetscObjectComm((PetscObject)P), PETSC_ERR_ARG_INCOMP, "-pc_hpddm_block_splitting and -pc_hpddm_has_neumann");
1280         PetscCheck(!algebraic, PetscObjectComm((PetscObject)P), PETSC_ERR_ARG_INCOMP, "-pc_hpddm_levels_1_st_pc_type mat and -pc_hpddm_has_neumann");
1281       }
1282       if (PetscBool3ToBool(data->Neumann) || block) structure = SAME_NONZERO_PATTERN;
1283       PetscCall(ISCreateStride(PetscObjectComm((PetscObject)data->is), P->rmap->n, P->rmap->rstart, 1, &loc));
1284     }
1285     PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%spc_hpddm_levels_1_", pcpre ? pcpre : ""));
1286     PetscCall(PetscOptionsGetEnum(nullptr, prefix, "-st_matstructure", MatStructures, (PetscEnum *)&structure, &flg)); /* if not user-provided, force its value when possible */
1287     if (!flg && structure == SAME_NONZERO_PATTERN) {                                                                   /* cannot call STSetMatStructure() yet, insert the appropriate option in the database, parsed by STSetFromOptions() */
1288       PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "-%spc_hpddm_levels_1_st_matstructure", pcpre ? pcpre : ""));
1289       PetscCall(PetscOptionsSetValue(nullptr, prefix, MatStructures[structure]));
1290     }
1291     flg = PETSC_FALSE;
1292     if (data->share) {
1293       data->share = PETSC_FALSE; /* will be reset to PETSC_TRUE if none of the conditions below are true */
1294       if (!subdomains) PetscCall(PetscInfo(pc, "Cannot share subdomain KSP between SLEPc and PETSc since -%spc_hpddm_define_subdomains is not true\n", pcpre ? pcpre : ""));
1295       else if (data->deflation) PetscCall(PetscInfo(pc, "Nothing to share since PCHPDDMSetDeflationMat() has been called\n"));
1296       else if (ismatis) PetscCall(PetscInfo(pc, "Cannot share subdomain KSP between SLEPc and PETSc with a Pmat of type MATIS\n"));
1297       else if (!algebraic && structure != SAME_NONZERO_PATTERN)
1298         PetscCall(PetscInfo(pc, "Cannot share subdomain KSP between SLEPc and PETSc since -%spc_hpddm_levels_1_st_matstructure %s (!= %s)\n", pcpre ? pcpre : "", MatStructures[structure], MatStructures[SAME_NONZERO_PATTERN]));
1299       else data->share = PETSC_TRUE;
1300     }
1301     if (!ismatis) {
1302       if (data->share || (!PetscBool3ToBool(data->Neumann) && subdomains)) PetscCall(ISDuplicate(is[0], &unsorted));
1303       else unsorted = is[0];
1304     }
1305     if (data->N > 1 && (data->aux || ismatis || algebraic)) {
1306       PetscCheck(loadedSym, PETSC_COMM_SELF, PETSC_ERR_PLIB, "HPDDM library not loaded, cannot use more than one level");
1307       PetscCall(MatSetOption(P, MAT_SUBMAT_SINGLEIS, PETSC_TRUE));
1308       if (ismatis) {
1309         /* needed by HPDDM (currently) so that the partition of unity is 0 on subdomain interfaces */
1310         PetscCall(MatIncreaseOverlap(P, 1, is, 1));
1311         PetscCall(ISDestroy(&data->is));
1312         data->is = is[0];
1313       } else {
1314         if (PetscDefined(USE_DEBUG)) {
1315           PetscBool equal;
1316           IS        intersect;
1317 
1318           PetscCall(ISIntersect(data->is, loc, &intersect));
1319           PetscCall(ISEqualUnsorted(loc, intersect, &equal));
1320           PetscCall(ISDestroy(&intersect));
1321           PetscCheck(equal, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "IS of the auxiliary Mat does not include all local rows of A");
1322         }
1323         PetscCall(PetscObjectComposeFunction((PetscObject)pc->pmat, "PCHPDDMAlgebraicAuxiliaryMat_Private_C", PCHPDDMAlgebraicAuxiliaryMat_Private));
1324         if (!PetscBool3ToBool(data->Neumann) && !algebraic) {
1325           PetscCall(PetscObjectTypeCompare((PetscObject)P, MATMPISBAIJ, &flg));
1326           if (flg) {
1327             /* maybe better to ISSort(is[0]), MatCreateSubMatrices(), and then MatPermute() */
1328             /* but there is no MatPermute_SeqSBAIJ(), so as before, just use MATMPIBAIJ     */
1329             PetscCall(MatConvert(P, MATMPIBAIJ, MAT_INITIAL_MATRIX, &uaux));
1330             flg = PETSC_FALSE;
1331           }
1332         }
1333       }
1334       if (algebraic) {
1335         PetscUseMethod(pc->pmat, "PCHPDDMAlgebraicAuxiliaryMat_Private_C", (Mat, IS *, Mat *[], PetscBool), (P, is, &sub, block));
1336         if (block) {
1337           PetscCall(PetscObjectQuery((PetscObject)sub[0], "_PCHPDDM_Neumann_Mat", (PetscObject *)&data->aux));
1338           PetscCall(PetscObjectCompose((PetscObject)sub[0], "_PCHPDDM_Neumann_Mat", nullptr));
1339         }
1340       } else if (!uaux) {
1341         if (PetscBool3ToBool(data->Neumann)) sub = &data->aux;
1342         else PetscCall(MatCreateSubMatrices(P, 1, is, is, MAT_INITIAL_MATRIX, &sub));
1343       } else {
1344         PetscCall(MatCreateSubMatrices(uaux, 1, is, is, MAT_INITIAL_MATRIX, &sub));
1345         PetscCall(MatDestroy(&uaux));
1346         PetscCall(MatConvert(sub[0], MATSEQSBAIJ, MAT_INPLACE_MATRIX, sub));
1347       }
1348       /* Vec holding the partition of unity */
1349       if (!data->levels[0]->D) {
1350         PetscCall(ISGetLocalSize(data->is, &n));
1351         PetscCall(VecCreateMPI(PETSC_COMM_SELF, n, PETSC_DETERMINE, &data->levels[0]->D));
1352       }
1353       if (data->share) {
1354         Mat      D;
1355         IS       perm = nullptr;
1356         PetscInt size = -1;
1357         if (!data->levels[0]->pc) {
1358           PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%spc_hpddm_levels_1_", pcpre ? pcpre : ""));
1359           PetscCall(PCCreate(PetscObjectComm((PetscObject)pc), &data->levels[0]->pc));
1360           PetscCall(PCSetOptionsPrefix(data->levels[0]->pc, prefix));
1361           PetscCall(PCSetOperators(data->levels[0]->pc, A, P));
1362         }
1363         PetscCall(PCSetType(data->levels[0]->pc, PCASM));
1364         if (!data->levels[0]->pc->setupcalled) {
1365           IS sorted; /* PCASM will sort the input IS, duplicate it to return an unmodified (PCHPDDM) input IS */
1366           PetscCall(ISDuplicate(is[0], &sorted));
1367           PetscCall(PCASMSetLocalSubdomains(data->levels[0]->pc, 1, &sorted, &loc));
1368           PetscCall(PetscObjectDereference((PetscObject)sorted));
1369         }
1370         PetscCall(PCSetFromOptions(data->levels[0]->pc));
1371         if (block) {
1372           PetscCall(PCHPDDMPermute_Private(unsorted, data->is, &uis, sub[0], &C, &perm));
1373           PetscCall(PCHPDDMCommunicationAvoidingPCASM_Private(data->levels[0]->pc, C, algebraic));
1374         } else PetscCall(PCSetUp(data->levels[0]->pc));
1375         PetscTryMethod(data->levels[0]->pc, "PCASMGetSubKSP_C", (PC, PetscInt *, PetscInt *, KSP **), (data->levels[0]->pc, &size, nullptr, &ksp));
1376         if (size != 1) {
1377           data->share = PETSC_FALSE;
1378           PetscCheck(size == -1, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Number of subdomain solvers %" PetscInt_FMT " != 1", size);
1379           PetscCall(PetscInfo(pc, "Cannot share subdomain KSP between SLEPc and PETSc since PCASMGetSubKSP() not found in fine-level PC\n"));
1380           PetscCall(ISDestroy(&unsorted));
1381           unsorted = is[0];
1382         } else {
1383           if (!block) PetscCall(PCHPDDMPermute_Private(unsorted, data->is, &uis, PetscBool3ToBool(data->Neumann) ? sub[0] : data->aux, &C, &perm));
1384           if (!PetscBool3ToBool(data->Neumann) && !block) {
1385             PetscCall(MatPermute(sub[0], perm, perm, &D)); /* permute since PCASM will call ISSort() */
1386             PetscCall(MatHeaderReplace(sub[0], &D));
1387           }
1388           if (data->B) { /* see PCHPDDMSetRHSMat() */
1389             PetscCall(MatPermute(data->B, perm, perm, &D));
1390             PetscCall(MatHeaderReplace(data->B, &D));
1391           }
1392           PetscCall(ISDestroy(&perm));
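          /* the (permuted) auxiliary Mat is now merged into a duplicate (D) of the nonzero pattern of the shared */
          /* subdomain KSP Pmat; D then replaces data->aux for the remainder of PCSetUp(), cf. the swap below     */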
1393           const char *matpre;
1394           PetscBool   cmp[4];
1395           PetscCall(KSPGetOperators(ksp[0], subA, subA + 1));
1396           PetscCall(MatDuplicate(subA[1], MAT_SHARE_NONZERO_PATTERN, &D));
1397           PetscCall(MatGetOptionsPrefix(subA[1], &matpre));
1398           PetscCall(MatSetOptionsPrefix(D, matpre));
1399           PetscCall(PetscObjectTypeCompare((PetscObject)D, MATNORMAL, cmp));
1400           PetscCall(PetscObjectTypeCompare((PetscObject)C, MATNORMAL, cmp + 1));
1401           if (!cmp[0]) PetscCall(PetscObjectTypeCompare((PetscObject)D, MATNORMALHERMITIAN, cmp + 2));
1402           else cmp[2] = PETSC_FALSE;
1403           if (!cmp[1]) PetscCall(PetscObjectTypeCompare((PetscObject)C, MATNORMALHERMITIAN, cmp + 3));
1404           else cmp[3] = PETSC_FALSE;
1405           PetscCheck(cmp[0] == cmp[1] && cmp[2] == cmp[3], PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "-%spc_hpddm_levels_1_pc_asm_sub_mat_type %s and auxiliary Mat of type %s", pcpre ? pcpre : "", ((PetscObject)D)->type_name, ((PetscObject)C)->type_name);
1406           if (!cmp[0] && !cmp[2]) {
1407             if (!block) PetscCall(MatAXPY(D, 1.0, C, SUBSET_NONZERO_PATTERN));
1408             else {
1409               PetscCall(MatMissingDiagonal(D, cmp, nullptr));
1410               if (cmp[0]) structure = DIFFERENT_NONZERO_PATTERN; /* data->aux has no missing diagonal entry */
1411               PetscCall(MatAXPY(D, 1.0, data->aux, structure));
1412             }
1413           } else {
1414             Mat mat[2];
1415             if (cmp[0]) {
1416               PetscCall(MatNormalGetMat(D, mat));
1417               PetscCall(MatNormalGetMat(C, mat + 1));
1418             } else {
1419               PetscCall(MatNormalHermitianGetMat(D, mat));
1420               PetscCall(MatNormalHermitianGetMat(C, mat + 1));
1421             }
1422             PetscCall(MatAXPY(mat[0], 1.0, mat[1], SUBSET_NONZERO_PATTERN));
1423           }
1424           PetscCall(MatPropagateSymmetryOptions(C, D));
1425           PetscCall(MatDestroy(&C));
1426           C = D;
1427           /* swap pointers so that variables stay consistent throughout PCSetUp() */
1428           std::swap(C, data->aux);
1429           std::swap(uis, data->is);
1430           swap = PETSC_TRUE;
1431         }
1432       }
1433       if (!data->levels[0]->scatter) {
1434         PetscCall(MatCreateVecs(P, &xin, nullptr));
1435         if (ismatis) PetscCall(MatDestroy(&P));
1436         PetscCall(VecScatterCreate(xin, data->is, data->levels[0]->D, nullptr, &data->levels[0]->scatter));
1437         PetscCall(VecDestroy(&xin));
1438       }
1439       if (data->levels[0]->P) {
1440         /* if the pattern is the same and PCSetUp() has previously succeeded, reuse HPDDM buffers and connectivity */
1441         PetscCall(HPDDM::Schwarz<PetscScalar>::destroy(data->levels[0], pc->setupcalled < 1 || pc->flag == DIFFERENT_NONZERO_PATTERN ? PETSC_TRUE : PETSC_FALSE));
1442       }
1443       if (!data->levels[0]->P) data->levels[0]->P = new HPDDM::Schwarz<PetscScalar>();
1444       if (data->log_separate) PetscCall(PetscLogEventBegin(PC_HPDDM_SetUp[0], data->levels[0]->ksp, nullptr, nullptr, nullptr));
1445       else PetscCall(PetscLogEventBegin(PC_HPDDM_Strc, data->levels[0]->ksp, nullptr, nullptr, nullptr));
1446       /* HPDDM internal data structure */
1447       PetscCall(data->levels[0]->P->structure(loc, data->is, sub[0], ismatis ? C : data->aux, data->levels));
1448       if (!data->log_separate) PetscCall(PetscLogEventEnd(PC_HPDDM_Strc, data->levels[0]->ksp, nullptr, nullptr, nullptr));
1449       /* matrix pencil of the generalized eigenvalue problem on the overlap (GenEO) */
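      /* unless a deflation space (data->deflation) or a user-supplied Mat (data->B, cf. PCHPDDMSetRHSMat()) is given,  */
      /* the pencil is completed by scaling a copy of the local overlap matrix on both sides by the partition of unity */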
1450       if (data->deflation) weighted = data->aux;
1451       else if (!data->B) {
1452         PetscBool cmp[2];
1453         PetscCall(MatDuplicate(sub[0], MAT_COPY_VALUES, &weighted));
1454         PetscCall(PetscObjectTypeCompare((PetscObject)weighted, MATNORMAL, cmp));
1455         if (!cmp[0]) PetscCall(PetscObjectTypeCompare((PetscObject)weighted, MATNORMALHERMITIAN, cmp + 1));
1456         else cmp[1] = PETSC_FALSE;
1457         if (!cmp[0] && !cmp[1]) PetscCall(MatDiagonalScale(weighted, data->levels[0]->D, data->levels[0]->D));
1458         else { /* MATNORMAL applies MatDiagonalScale() in a matrix-free fashion, not what is needed since this won't be passed to SLEPc during the eigensolve */
1459           if (cmp[0]) PetscCall(MatNormalGetMat(weighted, &data->B));
1460           else PetscCall(MatNormalHermitianGetMat(weighted, &data->B));
1461           PetscCall(MatDiagonalScale(data->B, nullptr, data->levels[0]->D));
1462           data->B = nullptr;
1463           flg     = PETSC_FALSE;
1464         }
1465         /* neither MatDuplicate() nor MatDiagonalScale() handles the symmetry options, so propagate the options explicitly  */
1466         /* only useful for -mat_type baij -pc_hpddm_levels_1_st_pc_type cholesky (no problem with MATAIJ or MATSBAIJ)       */
1467         PetscCall(MatPropagateSymmetryOptions(sub[0], weighted));
1468       } else weighted = data->B;
1469       /* SLEPc is used inside the loaded symbol */
1470       PetscCall((*loadedSym)(data->levels[0]->P, data->is, ismatis ? C : (algebraic && !block ? sub[0] : data->aux), weighted, data->B, initial, data->levels));
1471       if (data->share) {
1472         Mat st[2];
1473         PetscCall(KSPGetOperators(ksp[0], st, st + 1));
1474         PetscCall(MatCopy(subA[0], st[0], structure));
1475         if (subA[1] != subA[0] || st[1] != st[0]) PetscCall(MatCopy(subA[1], st[1], SAME_NONZERO_PATTERN));
1476       }
1477       if (data->log_separate) PetscCall(PetscLogEventEnd(PC_HPDDM_SetUp[0], data->levels[0]->ksp, nullptr, nullptr, nullptr));
1478       if (ismatis) PetscCall(MatISGetLocalMat(C, &N));
1479       else N = data->aux;
1480       P = sub[0];
1481       /* going through the grid hierarchy */
1482       for (n = 1; n < data->N; ++n) {
1483         if (data->log_separate) PetscCall(PetscLogEventBegin(PC_HPDDM_SetUp[n], data->levels[n]->ksp, nullptr, nullptr, nullptr));
1484         /* method composed in the loaded symbol since there, SLEPc is used as well */
1485         PetscTryMethod(data->levels[0]->ksp, "PCHPDDMSetUp_Private_C", (Mat *, Mat *, PetscInt, PetscInt *const, PC_HPDDM_Level **const), (&P, &N, n, &data->N, data->levels));
1486         if (data->log_separate) PetscCall(PetscLogEventEnd(PC_HPDDM_SetUp[n], data->levels[n]->ksp, nullptr, nullptr, nullptr));
1487       }
1488       /* reset to NULL to avoid any faulty use */
1489       PetscCall(PetscObjectComposeFunction((PetscObject)data->levels[0]->ksp, "PCHPDDMSetUp_Private_C", nullptr));
1490       if (!ismatis) PetscCall(PetscObjectComposeFunction((PetscObject)pc->pmat, "PCHPDDMAlgebraicAuxiliaryMat_C", nullptr));
1491       else PetscCall(PetscObjectDereference((PetscObject)C)); /* matching PetscObjectReference() above */
1492       for (n = 0; n < data->N - 1; ++n)
1493         if (data->levels[n]->P) {
1494           /* HPDDM internal work buffers */
1495           data->levels[n]->P->setBuffer();
1496           data->levels[n]->P->super::start();
1497         }
1498       if (ismatis || !subdomains) PetscCall(PCHPDDMDestroySubMatrices_Private(PetscBool3ToBool(data->Neumann), PetscBool(algebraic && !block), sub));
1499       if (ismatis) data->is = nullptr;
1500       for (n = 0; n < data->N - 1 + (reused > 0); ++n) {
1501         if (data->levels[n]->P) {
1502           PC spc;
1503 
1504           /* force the PC to be PCSHELL to do the coarse grid corrections */
1505           PetscCall(KSPSetSkipPCSetFromOptions(data->levels[n]->ksp, PETSC_TRUE));
1506           PetscCall(KSPGetPC(data->levels[n]->ksp, &spc));
1507           PetscCall(PCSetType(spc, PCSHELL));
1508           PetscCall(PCShellSetContext(spc, data->levels[n]));
1509           PetscCall(PCShellSetSetUp(spc, PCSetUp_HPDDMShell));
1510           PetscCall(PCShellSetApply(spc, PCApply_HPDDMShell));
1511           PetscCall(PCShellSetMatApply(spc, PCMatApply_HPDDMShell));
1512           PetscCall(PCShellSetDestroy(spc, PCDestroy_HPDDMShell));
1513           if (!data->levels[n]->pc) PetscCall(PCCreate(PetscObjectComm((PetscObject)data->levels[n]->ksp), &data->levels[n]->pc));
1514           if (n < reused) {
1515             PetscCall(PCSetReusePreconditioner(spc, PETSC_TRUE));
1516             PetscCall(PCSetReusePreconditioner(data->levels[n]->pc, PETSC_TRUE));
1517           }
1518           PetscCall(PCSetUp(spc));
1519         }
1520       }
1521       PetscCall(PetscObjectComposeFunction((PetscObject)pc->pmat, "PCHPDDMAlgebraicAuxiliaryMat_Private_C", nullptr));
1522     } else flg = reused ? PETSC_FALSE : PETSC_TRUE;
1523     if (!ismatis && subdomains) {
1524       if (flg) PetscCall(KSPGetPC(data->levels[0]->ksp, &inner));
1525       else inner = data->levels[0]->pc;
1526       if (inner) {
1527         if (!inner->setupcalled) PetscCall(PCSetType(inner, PCASM));
1528         PetscCall(PCSetFromOptions(inner));
1529         PetscCall(PetscStrcmp(((PetscObject)inner)->type_name, PCASM, &flg));
1530         if (flg) {
1531           if (!inner->setupcalled) { /* evaluates to PETSC_FALSE when -pc_hpddm_block_splitting */
1532             IS sorted;               /* PCASM will sort the input IS, duplicate it to return an unmodified (PCHPDDM) input IS */
1533             PetscCall(ISDuplicate(is[0], &sorted));
1534             PetscCall(PCASMSetLocalSubdomains(inner, 1, &sorted, &loc));
1535             PetscCall(PetscObjectDereference((PetscObject)sorted));
1536           }
1537           if (!PetscBool3ToBool(data->Neumann) && data->N > 1) { /* subdomain matrices are already created for the eigenproblem, reuse them for the fine-level PC */
1538             PetscCall(PCHPDDMPermute_Private(*is, nullptr, nullptr, sub[0], &P, nullptr));
1539             PetscCall(PCHPDDMCommunicationAvoidingPCASM_Private(inner, P, algebraic));
1540             PetscCall(PetscObjectDereference((PetscObject)P));
1541           }
1542         }
1543       }
1544       if (data->N > 1) PetscCall(PCHPDDMDestroySubMatrices_Private(PetscBool3ToBool(data->Neumann), PetscBool(algebraic && !block), sub));
1545     }
1546     PetscCall(ISDestroy(&loc));
1547   } else data->N = 1 + reused; /* force this value to 1 + reused if there is no way to build another level */
1548   if (requested != data->N + reused) {
1549     PetscCall(PetscInfo(pc, "%" PetscInt_FMT " levels requested, only %" PetscInt_FMT " built + %" PetscInt_FMT " reused. Options for level(s) > %" PetscInt_FMT ", including -%spc_hpddm_coarse_ will not be taken into account\n", requested, data->N, reused,
1550                         data->N, pcpre ? pcpre : ""));
1551     PetscCall(PetscInfo(pc, "It is best to tune parameters, e.g., a higher value for -%spc_hpddm_levels_%" PetscInt_FMT "_eps_threshold so that at least one local deflation vector will be selected\n", pcpre ? pcpre : "", data->N));
1552     /* cannot use PCDestroy_HPDDMShell() because PCSHELL is not set for unassembled levels */
1553     for (n = data->N - 1; n < requested - 1; ++n) {
1554       if (data->levels[n]->P) {
1555         PetscCall(HPDDM::Schwarz<PetscScalar>::destroy(data->levels[n], PETSC_TRUE));
1556         PetscCall(VecDestroyVecs(1, &data->levels[n]->v[0]));
1557         PetscCall(VecDestroyVecs(2, &data->levels[n]->v[1]));
1558         PetscCall(MatDestroy(data->levels[n]->V));
1559         PetscCall(MatDestroy(data->levels[n]->V + 1));
1560         PetscCall(MatDestroy(data->levels[n]->V + 2));
1561         PetscCall(VecDestroy(&data->levels[n]->D));
1562         PetscCall(VecScatterDestroy(&data->levels[n]->scatter));
1563       }
1564     }
1565     if (reused) {
1566       for (n = reused; n < PETSC_PCHPDDM_MAXLEVELS && data->levels[n]; ++n) {
1567         PetscCall(KSPDestroy(&data->levels[n]->ksp));
1568         PetscCall(PCDestroy(&data->levels[n]->pc));
1569       }
1570     }
1571     PetscCheck(!PetscDefined(USE_DEBUG), PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "%" PetscInt_FMT " levels requested, only %" PetscInt_FMT " built + %" PetscInt_FMT " reused. Options for level(s) > %" PetscInt_FMT ", including -%spc_hpddm_coarse_ will not be taken into account. It is best to tune parameters, e.g., a higher value for -%spc_hpddm_levels_%" PetscInt_FMT "_eps_threshold so that at least one local deflation vector will be selected. If you don't want this to error out, compile --with-debugging=0", requested,
1572                data->N, reused, data->N, pcpre ? pcpre : "", pcpre ? pcpre : "", data->N);
1573   }
1574   /* these solvers are created after PCSetFromOptions() is called */
1575   if (pc->setfromoptionscalled) {
1576     for (n = 0; n < data->N; ++n) {
1577       if (data->levels[n]->ksp) PetscCall(KSPSetFromOptions(data->levels[n]->ksp));
1578       if (data->levels[n]->pc) PetscCall(PCSetFromOptions(data->levels[n]->pc));
1579     }
1580     pc->setfromoptionscalled = 0;
1581   }
1582   data->N += reused;
1583   if (data->share && swap) {
1584     /* swap back pointers so that variables follow the user-provided numbering */
1585     std::swap(C, data->aux);
1586     std::swap(uis, data->is);
1587     PetscCall(MatDestroy(&C));
1588     PetscCall(ISDestroy(&uis));
1589   }
1590   if (algebraic) PetscCall(MatDestroy(&data->aux));
1591   if (unsorted && unsorted != is[0]) {
1592     PetscCall(ISCopy(unsorted, data->is));
1593     PetscCall(ISDestroy(&unsorted));
1594   }
1595 #if PetscDefined(USE_DEBUG)
1596   PetscCheck((data->is && dis) || (!data->is && !dis), PETSC_COMM_SELF, PETSC_ERR_PLIB, "An IS pointer is NULL but not the other: input IS pointer (%p) v. output IS pointer (%p)", (void *)dis, (void *)data->is);
1597   if (data->is) {
1598     PetscCall(ISEqualUnsorted(data->is, dis, &flg));
1599     PetscCall(ISDestroy(&dis));
1600     PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Input IS and output IS are not equal");
1601   }
1602   PetscCheck((data->aux && daux) || (!data->aux && !daux), PETSC_COMM_SELF, PETSC_ERR_PLIB, "A Mat pointer is NULL but not the other: input Mat pointer (%p) v. output Mat pointer (%p)", (void *)daux, (void *)data->aux);
1603   if (data->aux) {
1604     PetscCall(MatMultEqual(data->aux, daux, 20, &flg));
1605     PetscCall(MatDestroy(&daux));
1606     PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Input Mat and output Mat are not equal");
1607   }
1608 #endif
1609   PetscFunctionReturn(PETSC_SUCCESS);
1610 }
1611 
1612 /*@
1613      PCHPDDMSetCoarseCorrectionType - Sets the coarse correction type.
1614 
1615    Collective
1616 
1617    Input Parameters:
1618 +     pc - preconditioner context
1619 -     type - `PC_HPDDM_COARSE_CORRECTION_DEFLATED`, `PC_HPDDM_COARSE_CORRECTION_ADDITIVE`, or `PC_HPDDM_COARSE_CORRECTION_BALANCED`
1620 
1621    Options Database Key:
1622 .   -pc_hpddm_coarse_correction <deflated, additive, balanced> - type of coarse correction to apply
1623 
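   Example Usage:
     A minimal sketch, assuming `pc` has already been created with `PCCreate()`; this is equivalent to supplying the option -pc_hpddm_coarse_correction balanced.
.vb
     PetscCall(PCSetType(pc, PCHPDDM));
     PetscCall(PCHPDDMSetCoarseCorrectionType(pc, PC_HPDDM_COARSE_CORRECTION_BALANCED));
.ve
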
1624    Level: intermediate
1625 
1626 .seealso: `PCHPDDMGetCoarseCorrectionType()`, `PCHPDDM`, `PCHPDDMCoarseCorrectionType`
1627 @*/
1628 PetscErrorCode PCHPDDMSetCoarseCorrectionType(PC pc, PCHPDDMCoarseCorrectionType type)
1629 {
1630   PetscFunctionBegin;
1631   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1632   PetscValidLogicalCollectiveEnum(pc, type, 2);
1633   PetscTryMethod(pc, "PCHPDDMSetCoarseCorrectionType_C", (PC, PCHPDDMCoarseCorrectionType), (pc, type));
1634   PetscFunctionReturn(PETSC_SUCCESS);
1635 }
1636 
1637 /*@
1638      PCHPDDMGetCoarseCorrectionType - Gets the coarse correction type.
1639 
1640    Input Parameter:
1641 .     pc - preconditioner context
1642 
1643    Output Parameter:
1644 .     type - `PC_HPDDM_COARSE_CORRECTION_DEFLATED`, `PC_HPDDM_COARSE_CORRECTION_ADDITIVE`, or `PC_HPDDM_COARSE_CORRECTION_BALANCED`
1645 
1646    Level: intermediate
1647 
1648 .seealso: `PCHPDDMSetCoarseCorrectionType()`, `PCHPDDM`, `PCHPDDMCoarseCorrectionType`
1649 @*/
1650 PetscErrorCode PCHPDDMGetCoarseCorrectionType(PC pc, PCHPDDMCoarseCorrectionType *type)
1651 {
1652   PetscFunctionBegin;
1653   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1654   if (type) {
1655     PetscValidPointer(type, 2);
1656     PetscUseMethod(pc, "PCHPDDMGetCoarseCorrectionType_C", (PC, PCHPDDMCoarseCorrectionType *), (pc, type));
1657   }
1658   PetscFunctionReturn(PETSC_SUCCESS);
1659 }
1660 
1661 static PetscErrorCode PCHPDDMSetCoarseCorrectionType_HPDDM(PC pc, PCHPDDMCoarseCorrectionType type)
1662 {
1663   PC_HPDDM *data = (PC_HPDDM *)pc->data;
1664 
1665   PetscFunctionBegin;
1666   PetscCheck(type >= 0 && type <= 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PCHPDDMCoarseCorrectionType %d", type);
1667   data->correction = type;
1668   PetscFunctionReturn(PETSC_SUCCESS);
1669 }
1670 
1671 static PetscErrorCode PCHPDDMGetCoarseCorrectionType_HPDDM(PC pc, PCHPDDMCoarseCorrectionType *type)
1672 {
1673   PC_HPDDM *data = (PC_HPDDM *)pc->data;
1674 
1675   PetscFunctionBegin;
1676   *type = data->correction;
1677   PetscFunctionReturn(PETSC_SUCCESS);
1678 }
1679 
1680 /*@
1681      PCHPDDMSetSTShareSubKSP - Sets whether the `KSP` in SLEPc `ST` and the fine-level subdomain solver should be shared.
1682 
1683    Input Parameters:
1684 +     pc - preconditioner context
1685 -     share - whether the `KSP` should be shared or not
1686 
1687    Note:
1688      This is not the same as `PCSetReusePreconditioner()`. Under certain conditions (reported when running with -info), a symbolic factorization can be skipped
1689      when using a subdomain `PCType` such as `PCLU` or `PCCHOLESKY`.
1690 
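   Example Usage:
     A minimal sketch, assuming `pc` is a `PCHPDDM` whose auxiliary `Mat` has already been supplied; see also the related fine-level option -pc_hpddm_levels_1_st_share_sub_ksp.
.vb
     PetscCall(PCHPDDMSetSTShareSubKSP(pc, PETSC_TRUE));
.ve
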
1691    Level: advanced
1692 
1693 .seealso: `PCHPDDM`, `PCHPDDMGetSTShareSubKSP()`
1694 @*/
1695 PetscErrorCode PCHPDDMSetSTShareSubKSP(PC pc, PetscBool share)
1696 {
1697   PetscFunctionBegin;
1698   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1699   PetscTryMethod(pc, "PCHPDDMSetSTShareSubKSP_C", (PC, PetscBool), (pc, share));
1700   PetscFunctionReturn(PETSC_SUCCESS);
1701 }
1702 
1703 /*@
1704      PCHPDDMGetSTShareSubKSP - Gets whether the `KSP` in SLEPc `ST` and the fine-level subdomain solver is shared.
1705 
1706    Input Parameter:
1707 .     pc - preconditioner context
1708 
1709    Output Parameter:
1710 .     share - whether the `KSP` is shared or not
1711 
1712    Note:
1713      This is not the same as `PCGetReusePreconditioner()`. The return value is unlikely to be true, but when it is, a symbolic factorization can be skipped
1714      when using a subdomain `PCType` such as `PCLU` or `PCCHOLESKY`.
1715 
1716    Level: advanced
1717 
1718 .seealso: `PCHPDDM`, `PCHPDDMSetSTShareSubKSP()`
1719 @*/
1720 PetscErrorCode PCHPDDMGetSTShareSubKSP(PC pc, PetscBool *share)
1721 {
1722   PetscFunctionBegin;
1723   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1724   if (share) {
1725     PetscValidBoolPointer(share, 2);
1726     PetscUseMethod(pc, "PCHPDDMGetSTShareSubKSP_C", (PC, PetscBool *), (pc, share));
1727   }
1728   PetscFunctionReturn(PETSC_SUCCESS);
1729 }
1730 
1731 static PetscErrorCode PCHPDDMSetSTShareSubKSP_HPDDM(PC pc, PetscBool share)
1732 {
1733   PC_HPDDM *data = (PC_HPDDM *)pc->data;
1734 
1735   PetscFunctionBegin;
1736   data->share = share;
1737   PetscFunctionReturn(PETSC_SUCCESS);
1738 }
1739 
1740 static PetscErrorCode PCHPDDMGetSTShareSubKSP_HPDDM(PC pc, PetscBool *share)
1741 {
1742   PC_HPDDM *data = (PC_HPDDM *)pc->data;
1743 
1744   PetscFunctionBegin;
1745   *share = data->share;
1746   PetscFunctionReturn(PETSC_SUCCESS);
1747 }
1748 
1749 /*@
1750      PCHPDDMSetDeflationMat - Sets the deflation space used to assemble a coarser operator.
1751 
1752    Input Parameters:
1753 +     pc - preconditioner context
1754 .     is - index set of the local deflation matrix
1755 -     U - deflation sequential matrix stored as a `MATSEQDENSE`
1756 
1757    Level: advanced
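   Example Usage:
     A minimal sketch in which the local size `nloc`, the global offset `first`, and the number of deflation vectors `nev` are placeholders, and the columns of `U` are assumed to be filled elsewhere.
.vb
     IS  is;
     Mat U;

     PetscCall(ISCreateStride(PETSC_COMM_SELF, nloc, first, 1, &is));
     PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, nloc, nev, NULL, &U));
     /* fill the nev columns of U with the local deflation vectors */
     PetscCall(PCHPDDMSetDeflationMat(pc, is, U));
     PetscCall(MatDestroy(&U));
     PetscCall(ISDestroy(&is));
.ve
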
1758 
1759 .seealso: `PCHPDDM`, `PCDeflationSetSpace()`, `PCMGSetRestriction()`
1760 @*/
1761 PetscErrorCode PCHPDDMSetDeflationMat(PC pc, IS is, Mat U)
1762 {
1763   PetscFunctionBegin;
1764   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1765   PetscValidHeaderSpecific(is, IS_CLASSID, 2);
1766   PetscValidHeaderSpecific(U, MAT_CLASSID, 3);
1767   PetscTryMethod(pc, "PCHPDDMSetDeflationMat_C", (PC, IS, Mat), (pc, is, U));
1768   PetscFunctionReturn(PETSC_SUCCESS);
1769 }
1770 
1771 static PetscErrorCode PCHPDDMSetDeflationMat_HPDDM(PC pc, IS is, Mat U)
1772 {
1773   PetscFunctionBegin;
1774   PetscCall(PCHPDDMSetAuxiliaryMat_Private(pc, is, U, PETSC_TRUE));
1775   PetscFunctionReturn(PETSC_SUCCESS);
1776 }
1777 
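/* loads the dynamic library libhpddm_petsc, which provides the PCHPDDM_Internal symbol retrieved in PCCreate_HPDDM() */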
1778 PetscErrorCode HPDDMLoadDL_Private(PetscBool *found)
1779 {
1780   PetscBool flg;
1781   char      lib[PETSC_MAX_PATH_LEN], dlib[PETSC_MAX_PATH_LEN], dir[PETSC_MAX_PATH_LEN];
1782 
1783   PetscFunctionBegin;
1784   PetscValidBoolPointer(found, 1);
1785   PetscCall(PetscStrncpy(dir, "${PETSC_LIB_DIR}", sizeof(dir)));
1786   PetscCall(PetscOptionsGetString(nullptr, nullptr, "-hpddm_dir", dir, sizeof(dir), nullptr));
1787   PetscCall(PetscSNPrintf(lib, sizeof(lib), "%s/libhpddm_petsc", dir));
1788   PetscCall(PetscDLLibraryRetrieve(PETSC_COMM_SELF, lib, dlib, 1024, found));
1789 #if defined(SLEPC_LIB_DIR) /* this variable is passed during SLEPc ./configure when PETSc has not been configured   */
1790   if (!*found) {           /* with --download-hpddm since slepcconf.h is not yet built (and thus can't be included) */
1791     PetscCall(PetscStrncpy(dir, HPDDM_STR(SLEPC_LIB_DIR), sizeof(dir)));
1792     PetscCall(PetscSNPrintf(lib, sizeof(lib), "%s/libhpddm_petsc", dir));
1793     PetscCall(PetscDLLibraryRetrieve(PETSC_COMM_SELF, lib, dlib, 1024, found));
1794   }
1795 #endif
1796   if (!*found) { /* probable reasons for this to evaluate to PETSC_TRUE: system inconsistency (libhpddm_petsc moved by the user?) or PETSc configured without --download-slepc */
1797     PetscCall(PetscOptionsGetenv(PETSC_COMM_SELF, "SLEPC_DIR", dir, sizeof(dir), &flg));
1798     if (flg) { /* if both PETSc and SLEPc are configured with --download-hpddm but PETSc has been configured without --download-slepc, one must ensure that libslepc is loaded before libhpddm_petsc */
1799       PetscCall(PetscSNPrintf(lib, sizeof(lib), "%s/lib/libslepc", dir));
1800       PetscCall(PetscDLLibraryRetrieve(PETSC_COMM_SELF, lib, dlib, 1024, found));
1801       PetscCheck(*found, PETSC_COMM_SELF, PETSC_ERR_PLIB, "%s not found but SLEPC_DIR=%s", lib, dir);
1802       PetscCall(PetscDLLibraryAppend(PETSC_COMM_SELF, &PetscDLLibrariesLoaded, dlib));
1803       PetscCall(PetscSNPrintf(lib, sizeof(lib), "%s/lib/libhpddm_petsc", dir)); /* libhpddm_petsc is always in the same directory as libslepc */
1804       PetscCall(PetscDLLibraryRetrieve(PETSC_COMM_SELF, lib, dlib, 1024, found));
1805     }
1806   }
1807   PetscCheck(*found, PETSC_COMM_SELF, PETSC_ERR_PLIB, "%s not found", lib);
1808   PetscCall(PetscDLLibraryAppend(PETSC_COMM_SELF, &PetscDLLibrariesLoaded, dlib));
1809   PetscFunctionReturn(PETSC_SUCCESS);
1810 }
1811 
1812 /*MC
1813      PCHPDDM - Interface with the HPDDM library.
1814 
1815    This `PC` may be used to build multilevel spectral domain decomposition methods based on the GenEO framework [2011, 2019]. It may be viewed as an alternative to spectral
1816    AMGe or `PCBDDC` with adaptive selection of constraints. The interface is explained in detail in [2021] (see the references below).
1817 
1818    The matrix to be preconditioned (Pmat) may be unassembled (`MATIS`), assembled (`MATAIJ`, `MATBAIJ`, or `MATSBAIJ`), hierarchical (`MATHTOOL`), or `MATNORMAL`.
1819 
1820    For multilevel preconditioning, when using an assembled or hierarchical Pmat, one must provide an auxiliary local `Mat` (unassembled local operator for GenEO) using
1821    `PCHPDDMSetAuxiliaryMat()`. Calling this routine is not needed when using a `MATIS` Pmat; assembly is then done internally using `MatConvert()`.
1822 
1823    Options Database Keys:
1824 +   -pc_hpddm_define_subdomains <true, default=false> - on the finest level, calls `PCASMSetLocalSubdomains()` with the `IS` supplied in `PCHPDDMSetAuxiliaryMat()`
1825     (not relevant with an unassembled Pmat)
1826 .   -pc_hpddm_has_neumann <true, default=false> - on the finest level, informs the `PC` that the local Neumann matrix is supplied in `PCHPDDMSetAuxiliaryMat()`
1827 -   -pc_hpddm_coarse_correction <type, default=deflated> - determines the `PCHPDDMCoarseCorrectionType` when calling `PCApply()`
1828 
1829    Options for subdomain solvers, subdomain eigensolvers (for computing deflation vectors), and the coarse solver can be set using the following options database prefixes.
1830 .vb
1831       -pc_hpddm_levels_%d_pc_
1832       -pc_hpddm_levels_%d_ksp_
1833       -pc_hpddm_levels_%d_eps_
1834       -pc_hpddm_levels_%d_p
1835       -pc_hpddm_levels_%d_mat_type
1836       -pc_hpddm_coarse_
1837       -pc_hpddm_coarse_p
1838       -pc_hpddm_coarse_mat_type
1839       -pc_hpddm_coarse_mat_chop
1840 .ve
1841 
1842    E.g., -pc_hpddm_levels_1_sub_pc_type lu -pc_hpddm_levels_1_eps_nev 10 -pc_hpddm_levels_2_p 4 -pc_hpddm_levels_2_sub_pc_type lu -pc_hpddm_levels_2_eps_nev 10
1843     -pc_hpddm_coarse_p 2 -pc_hpddm_coarse_mat_type baij will use 10 deflation vectors per subdomain on the fine "level 1",
1844     aggregate the fine subdomains into 4 "level 2" subdomains, then use 10 deflation vectors per subdomain on "level 2",
1845     and assemble the coarse matrix (of dimension 4 x 10 = 40) on two processes as a `MATBAIJ` (default is `MATSBAIJ`).
1846 
1847    In order to activate a "level N+1" coarse correction, it is mandatory to supply -pc_hpddm_levels_N_eps_nev <nu> or -pc_hpddm_levels_N_eps_threshold <val>. The default -pc_hpddm_coarse_p value is 1, meaning that the coarse operator is aggregated on a single process.
1848 
1849    This preconditioner requires that you build PETSc with SLEPc (``--download-slepc``). By default, the underlying concurrent eigenproblems
1850    are solved using SLEPc shift-and-invert spectral transformation. This is usually what gives the best performance for GenEO, cf. [2011, 2013]. As
1851    stated above, SLEPc options are available through -pc_hpddm_levels_%d_, e.g., -pc_hpddm_levels_1_eps_type arpack -pc_hpddm_levels_1_eps_nev 10
1852    -pc_hpddm_levels_1_st_type sinvert. There are, furthermore, three options related to the (subdomain-wise local) eigensolver that are not described in
1853    the SLEPc documentation since they are specific to `PCHPDDM`.
1854 .vb
1855       -pc_hpddm_levels_1_st_share_sub_ksp
1856       -pc_hpddm_levels_%d_eps_threshold
1857       -pc_hpddm_levels_1_eps_use_inertia
1858 .ve
1859 
1860    The first option from the list only applies to the fine-level eigensolver, see `PCHPDDMSetSTShareSubKSP()`. The second option from the list is
1861    used to filter eigenmodes retrieved after convergence of `EPSSolve()` at "level N" such that eigenvectors used to define a "level N+1" coarse
1862    correction are associated with eigenvalues whose magnitudes are less than or equal to -pc_hpddm_levels_N_eps_threshold. When using an `EPS` which cannot
1863    determine a priori the proper -pc_hpddm_levels_N_eps_nev such that all wanted eigenmodes are retrieved, it is possible to get an estimate of the
1864    correct value using the third option from the list, -pc_hpddm_levels_1_eps_use_inertia, see `MatGetInertia()`. In that case, there is no need
1865    to supply -pc_hpddm_levels_1_eps_nev. This last option also only applies to the fine-level (N = 1) eigensolver.
1866 
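   Example Usage:
     A minimal sketch of a two-level setup; the global Pmat `A`, the local unassembled (Neumann) `Mat` `aux`, and the `IS` `ovl` describing the overlapping subdomain are assumed to have been built beforehand.
.vb
     KSP ksp;
     PC  pc;

     PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));
     PetscCall(KSPSetOperators(ksp, A, A));
     PetscCall(KSPGetPC(ksp, &pc));
     PetscCall(PCSetType(pc, PCHPDDM));
     PetscCall(PCHPDDMSetAuxiliaryMat(pc, ovl, aux, NULL, NULL));
     PetscCall(PCHPDDMHasNeumannMat(pc, PETSC_TRUE)); /* aux is the local Neumann matrix     */
     PetscCall(KSPSetFromOptions(ksp));               /* e.g., -pc_hpddm_levels_1_eps_nev 10 */
.ve
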
1867    References:
1868 +   2011 - A robust two-level domain decomposition preconditioner for systems of PDEs. Spillane, Dolean, Hauret, Nataf, Pechstein, and Scheichl. Comptes Rendus Mathematique.
1869 .   2013 - Scalable domain decomposition preconditioners for heterogeneous elliptic problems. Jolivet, Hecht, Nataf, and Prud'homme. SC13.
1870 .   2015 - An introduction to domain decomposition methods: algorithms, theory, and parallel implementation. Dolean, Jolivet, and Nataf. SIAM.
1871 .   2019 - A multilevel Schwarz preconditioner based on a hierarchy of robust coarse spaces. Al Daas, Grigori, Jolivet, and Tournier. SIAM Journal on Scientific Computing.
1872 .   2021 - KSPHPDDM and PCHPDDM: extending PETSc with advanced Krylov methods and robust multilevel overlapping Schwarz preconditioners. Jolivet, Roman, and Zampini. Computers & Mathematics with Applications.
1873 .   2022a - A robust algebraic domain decomposition preconditioner for sparse normal equations. Al Daas, Jolivet, and Scott. SIAM Journal on Scientific Computing.
1874 -   2022b - A robust algebraic multilevel domain decomposition preconditioner for sparse symmetric positive definite matrices. Al Daas and Jolivet.
1875 
1876    Level: intermediate
1877 
1878 .seealso: `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCHPDDMSetAuxiliaryMat()`, `MATIS`, `PCBDDC`, `PCDEFLATION`, `PCTELESCOPE`, `PCASM`,
1879           `PCHPDDMSetCoarseCorrectionType()`, `PCHPDDMHasNeumannMat()`, `PCHPDDMSetRHSMat()`, `PCHPDDMSetDeflationMat()`, `PCHPDDMSetSTShareSubKSP()`,
1880           `PCHPDDMGetSTShareSubKSP()`, `PCHPDDMGetCoarseCorrectionType()`, `PCHPDDMGetComplexities()`
1881 M*/
1882 PETSC_EXTERN PetscErrorCode PCCreate_HPDDM(PC pc)
1883 {
1884   PC_HPDDM *data;
1885   PetscBool found;
1886 
1887   PetscFunctionBegin;
1888   if (!loadedSym) {
1889     PetscCall(HPDDMLoadDL_Private(&found));
1890     if (found) PetscCall(PetscDLLibrarySym(PETSC_COMM_SELF, &PetscDLLibrariesLoaded, nullptr, "PCHPDDM_Internal", (void **)&loadedSym));
1891   }
1892   PetscCheck(loadedSym, PETSC_COMM_SELF, PETSC_ERR_PLIB, "PCHPDDM_Internal symbol not found in loaded libhpddm_petsc");
1893   PetscCall(PetscNew(&data));
1894   pc->data                = data;
1895   data->Neumann           = PETSC_BOOL3_UNKNOWN;
1896   pc->ops->reset          = PCReset_HPDDM;
1897   pc->ops->destroy        = PCDestroy_HPDDM;
1898   pc->ops->setfromoptions = PCSetFromOptions_HPDDM;
1899   pc->ops->setup          = PCSetUp_HPDDM;
1900   pc->ops->apply          = PCApply_HPDDM;
1901   pc->ops->matapply       = PCMatApply_HPDDM;
1902   pc->ops->view           = PCView_HPDDM;
1903   pc->ops->presolve       = PCPreSolve_HPDDM;
1904 
1905   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHPDDMSetAuxiliaryMat_C", PCHPDDMSetAuxiliaryMat_HPDDM));
1906   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHPDDMHasNeumannMat_C", PCHPDDMHasNeumannMat_HPDDM));
1907   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHPDDMSetRHSMat_C", PCHPDDMSetRHSMat_HPDDM));
1908   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHPDDMSetCoarseCorrectionType_C", PCHPDDMSetCoarseCorrectionType_HPDDM));
1909   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHPDDMGetCoarseCorrectionType_C", PCHPDDMGetCoarseCorrectionType_HPDDM));
1910   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHPDDMSetSTShareSubKSP_C", PCHPDDMSetSTShareSubKSP_HPDDM));
1911   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHPDDMGetSTShareSubKSP_C", PCHPDDMGetSTShareSubKSP_HPDDM));
1912   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHPDDMSetDeflationMat_C", PCHPDDMSetDeflationMat_HPDDM));
1913   PetscFunctionReturn(PETSC_SUCCESS);
1914 }
1915 
1916 /*@C
1917      PCHPDDMInitializePackage - This function initializes everything in the `PCHPDDM` package. It is called from `PCInitializePackage()`.
1918 
1919    Level: developer
1920 
1921 .seealso: `PetscInitialize()`
1922 @*/
1923 PetscErrorCode PCHPDDMInitializePackage(void)
1924 {
1925   char     ename[32];
1926   PetscInt i;
1927 
1928   PetscFunctionBegin;
1929   if (PCHPDDMPackageInitialized) PetscFunctionReturn(PETSC_SUCCESS);
1930   PCHPDDMPackageInitialized = PETSC_TRUE;
1931   PetscCall(PetscRegisterFinalize(PCHPDDMFinalizePackage));
1932   /* general events registered once during package initialization */
1933   /* some of these events are not triggered in libpetsc,          */
1934   /* but rather directly in libhpddm_petsc,                       */
1935   /* which is in charge of performing the following operations    */
1936 
1937   /* domain decomposition structure from Pmat sparsity pattern    */
1938   PetscCall(PetscLogEventRegister("PCHPDDMStrc", PC_CLASSID, &PC_HPDDM_Strc));
1939   /* Galerkin product, redistribution, and setup (not triggered in libpetsc)                */
1940   PetscCall(PetscLogEventRegister("PCHPDDMPtAP", PC_CLASSID, &PC_HPDDM_PtAP));
1941   /* Galerkin product with summation, redistribution, and setup (not triggered in libpetsc) */
1942   PetscCall(PetscLogEventRegister("PCHPDDMPtBP", PC_CLASSID, &PC_HPDDM_PtBP));
1943   /* next level construction using PtAP and PtBP (not triggered in libpetsc)                */
1944   PetscCall(PetscLogEventRegister("PCHPDDMNext", PC_CLASSID, &PC_HPDDM_Next));
1945   static_assert(PETSC_PCHPDDM_MAXLEVELS <= 9, "PETSC_PCHPDDM_MAXLEVELS value is too high");
1946   for (i = 1; i < PETSC_PCHPDDM_MAXLEVELS; ++i) {
1947     PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCHPDDMSetUp L%1" PetscInt_FMT, i));
1948     /* events during a PCSetUp() at level #i _except_ the assembly */
1949     /* of the Galerkin operator of the coarser level #(i + 1)      */
1950     PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_HPDDM_SetUp[i - 1]));
1951     PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCHPDDMSolve L%1" PetscInt_FMT, i));
1952     /* events during a PCApply() at level #i _except_              */
1953     /* the KSPSolve() of the coarser level #(i + 1)                */
1954     PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_HPDDM_Solve[i - 1]));
1955   }
1956   PetscFunctionReturn(PETSC_SUCCESS);
1957 }
1958 
1959 /*@C
1960      PCHPDDMFinalizePackage - This function frees everything from the `PCHPDDM` package. It is called from `PetscFinalize()`.
1961 
1962    Level: developer
1963 
1964 .seealso: `PetscFinalize()`
1965 @*/
1966 PetscErrorCode PCHPDDMFinalizePackage(void)
1967 {
1968   PetscFunctionBegin;
1969   PCHPDDMPackageInitialized = PETSC_FALSE;
1970   PetscFunctionReturn(PETSC_SUCCESS);
1971 }
1972