xref: /petsc/src/mat/impls/sbaij/seq/cholmod/sbaijcholmod.c (revision 517f4e3460cd8c7e73c19b8bf533f1efe47f30a7)
1 /*
2    Provides an interface to the CHOLMOD sparse solver available through SuiteSparse version 4.2.1
3 
4    When built with PETSC_USE_64BIT_INDICES this will use SuiteSparse_long as the
5    integer type in CHOLMOD, otherwise it will use int. This means
6    all integers in this file are simply declared as PetscInt. Also it means
7    that one cannot use 64BIT_INDICES on 32-bit pointer systems [as SuiteSparse_long is 32-bit only]
8 
9 */
10 
11 #include <../src/mat/impls/sbaij/seq/sbaij.h>
12 #include <../src/mat/impls/sbaij/seq/cholmod/cholmodimpl.h>
13 
/*
   This is a terrible hack, but it allows the error handler to retain a context.
   Note that this hack really cannot be made both reentrant and concurrent.
*/
static Mat static_F; /* factor currently being operated on; set before each CHOLMOD call so the error handler has a context */
19 
20 static void CholmodErrorHandler(int status, const char *file, int line, const char *message)
21 {
22   PetscFunctionBegin;
23   if (status > CHOLMOD_OK) {
24     PetscCallVoid(PetscInfo(static_F, "CHOLMOD warning %d at %s:%d: %s\n", status, file, line, message));
25   } else if (status == CHOLMOD_OK) { /* Documentation says this can happen, but why? */
26     PetscCallVoid(PetscInfo(static_F, "CHOLMOD OK at %s:%d: %s\n", file, line, message));
27   } else {
28     PetscCallVoid(PetscErrorPrintf("CHOLMOD error %d at %s:%d: %s\n", status, file, line, message));
29   }
30   PetscFunctionReturnVoid();
31 }
32 
/* Query "-mat_cholmod_<name>" from the options database and store the result
   in the double member c->name; relies on `c` (cholmod_common*) being in the
   enclosing scope. */
#define CHOLMOD_OPTION_DOUBLE(name, help) \
  do { \
    PetscReal tmp = (PetscReal)c->name; \
    PetscCall(PetscOptionsReal("-mat_cholmod_" #name, help, "None", tmp, &tmp, NULL)); \
    c->name = (double)tmp; \
  } while (0)
39 
/* Query "-mat_cholmod_<name>" from the options database and store the result
   in the int member c->name; relies on `c` (cholmod_common*) being in the
   enclosing scope. */
#define CHOLMOD_OPTION_INT(name, help) \
  do { \
    PetscInt tmp = (PetscInt)c->name; \
    PetscCall(PetscOptionsInt("-mat_cholmod_" #name, help, "None", tmp, &tmp, NULL)); \
    c->name = (int)tmp; \
  } while (0)
46 
/* Query "-mat_cholmod_<name>" as a real-valued option and store it in the
   size_t member c->name after a range check; relies on `c` (cholmod_common*)
   and `F` (Mat) being in the enclosing scope.
   Fixes: the seed value was cast through (PetscInt) instead of (PetscReal),
   unlike the sibling macros, and the error message said "positive" although
   the check accepts zero. */
#define CHOLMOD_OPTION_SIZE_T(name, help) \
  do { \
    PetscReal tmp = (PetscReal)c->name; \
    PetscCall(PetscOptionsReal("-mat_cholmod_" #name, help, "None", tmp, &tmp, NULL)); \
    PetscCheck(tmp >= 0, PetscObjectComm((PetscObject)F), PETSC_ERR_ARG_OUTOFRANGE, "value must be nonnegative"); \
    c->name = (size_t)tmp; \
  } while (0)
54 
/* Query "-mat_cholmod_<name>" as a boolean and store it as int (0/1) in
   c->name; !! normalizes any nonzero current value to PETSC_TRUE so the
   default shown to the user is well-formed. */
#define CHOLMOD_OPTION_BOOL(name, help) \
  do { \
    PetscBool tmp = (PetscBool) !!c->name; \
    PetscCall(PetscOptionsBool("-mat_cholmod_" #name, help, "None", tmp, &tmp, NULL)); \
    c->name = (int)tmp; \
  } while (0)
61 
62 static PetscErrorCode CholmodSetOptions(Mat F)
63 {
64   Mat_CHOLMOD    *chol = (Mat_CHOLMOD *)F->data;
65   cholmod_common *c    = chol->common;
66   PetscBool       flg;
67 
68   PetscFunctionBegin;
69   PetscOptionsBegin(PetscObjectComm((PetscObject)F), ((PetscObject)F)->prefix, "CHOLMOD Options", "Mat");
70   CHOLMOD_OPTION_INT(nmethods, "Number of different ordering methods to try");
71 
72 #if defined(PETSC_USE_SUITESPARSE_GPU)
73   c->useGPU = 1;
74   CHOLMOD_OPTION_INT(useGPU, "Use GPU for BLAS 1, otherwise 0");
75   CHOLMOD_OPTION_SIZE_T(maxGpuMemBytes, "Maximum memory to allocate on the GPU");
76   CHOLMOD_OPTION_DOUBLE(maxGpuMemFraction, "Fraction of available GPU memory to allocate");
77 #endif
78 
79   /* CHOLMOD handles first-time packing and refactor-packing separately, but we usually want them to be the same. */
80   chol->pack = (PetscBool)c->final_pack;
81   PetscCall(PetscOptionsBool("-mat_cholmod_pack", "Pack factors after factorization [disable for frequent repeat factorization]", "None", chol->pack, &chol->pack, NULL));
82   c->final_pack = (int)chol->pack;
83 
84   CHOLMOD_OPTION_DOUBLE(dbound, "Minimum absolute value of diagonal entries of D");
85   CHOLMOD_OPTION_DOUBLE(grow0, "Global growth ratio when factors are modified");
86   CHOLMOD_OPTION_DOUBLE(grow1, "Column growth ratio when factors are modified");
87   CHOLMOD_OPTION_SIZE_T(grow2, "Affine column growth constant when factors are modified");
88   CHOLMOD_OPTION_SIZE_T(maxrank, "Max rank of update, larger values are faster but use more memory [2,4,8]");
89   {
90     static const char *const list[] = {"SIMPLICIAL", "AUTO", "SUPERNODAL", "MatCholmodFactorType", "MAT_CHOLMOD_FACTOR_", 0};
91     PetscCall(PetscOptionsEnum("-mat_cholmod_factor", "Factorization method", "None", list, (PetscEnum)c->supernodal, (PetscEnum *)&c->supernodal, NULL));
92   }
93   if (c->supernodal) CHOLMOD_OPTION_DOUBLE(supernodal_switch, "flop/nnz_L threshold for switching to supernodal factorization");
94   CHOLMOD_OPTION_BOOL(final_asis, "Leave factors \"as is\"");
95   CHOLMOD_OPTION_BOOL(final_pack, "Pack the columns when finished (use FALSE if the factors will be updated later)");
96   if (!c->final_asis) {
97     CHOLMOD_OPTION_BOOL(final_super, "Leave supernodal factors instead of converting to simplicial");
98     CHOLMOD_OPTION_BOOL(final_ll, "Turn LDL' factorization into LL'");
99     CHOLMOD_OPTION_BOOL(final_monotonic, "Ensure columns are monotonic when done");
100     CHOLMOD_OPTION_BOOL(final_resymbol, "Remove numerically zero values resulting from relaxed supernodal amalgamation");
101   }
102   {
103     PetscReal tmp[] = {(PetscReal)c->zrelax[0], (PetscReal)c->zrelax[1], (PetscReal)c->zrelax[2]};
104     PetscInt  n     = 3;
105     PetscCall(PetscOptionsRealArray("-mat_cholmod_zrelax", "3 real supernodal relaxed amalgamation parameters", "None", tmp, &n, &flg));
106     PetscCheck(!flg || n == 3, PetscObjectComm((PetscObject)F), PETSC_ERR_ARG_OUTOFRANGE, "must provide exactly 3 parameters to -mat_cholmod_zrelax");
107     if (flg)
108       while (n--) c->zrelax[n] = (double)tmp[n];
109   }
110   {
111     PetscInt n, tmp[] = {(PetscInt)c->nrelax[0], (PetscInt)c->nrelax[1], (PetscInt)c->nrelax[2]};
112     PetscCall(PetscOptionsIntArray("-mat_cholmod_nrelax", "3 size_t supernodal relaxed amalgamation parameters", "None", tmp, &n, &flg));
113     PetscCheck(!flg || n == 3, PetscObjectComm((PetscObject)F), PETSC_ERR_ARG_OUTOFRANGE, "must provide exactly 3 parameters to -mat_cholmod_nrelax");
114     if (flg)
115       while (n--) c->nrelax[n] = (size_t)tmp[n];
116   }
117   CHOLMOD_OPTION_BOOL(prefer_upper, "Work with upper triangular form [faster when using fill-reducing ordering, slower in natural ordering]");
118   CHOLMOD_OPTION_BOOL(default_nesdis, "Use NESDIS instead of METIS for nested dissection");
119   CHOLMOD_OPTION_INT(print, "Verbosity level");
120   PetscOptionsEnd();
121   PetscFunctionReturn(PETSC_SUCCESS);
122 }
123 
124 PetscErrorCode CholmodStart(Mat F)
125 {
126   Mat_CHOLMOD    *chol = (Mat_CHOLMOD *)F->data;
127   cholmod_common *c;
128 
129   PetscFunctionBegin;
130   if (chol->common) PetscFunctionReturn(PETSC_SUCCESS);
131   PetscCall(PetscMalloc1(1, &chol->common));
132   PetscCallExternal(!cholmod_X_start, chol->common);
133 
134   c                = chol->common;
135   c->error_handler = CholmodErrorHandler;
136   PetscFunctionReturn(PETSC_SUCCESS);
137 }
138 
/*
   Present a SeqSBAIJ matrix (block size 1) to CHOLMOD as a cholmod_sparse
   structure without copying the index arrays.

   Input:
     A      - the SeqSBAIJ matrix
     values - PETSC_TRUE to expose numerical values as well, otherwise pattern only
   Output:
     C        - CHOLMOD view of A; index arrays are always shared with A
     aijalloc - always PETSC_FALSE here (no index arrays were allocated)
     valloc   - PETSC_TRUE iff a value array was allocated (complex case only);
                caller must then free C->x with PetscFree()
*/
static PetscErrorCode MatWrapCholmod_seqsbaij(Mat A, PetscBool values, cholmod_sparse *C, PetscBool *aijalloc, PetscBool *valloc)
{
  Mat_SeqSBAIJ *sbaij    = (Mat_SeqSBAIJ *)A->data;
  PetscBool     vallocin = PETSC_FALSE;

  PetscFunctionBegin;
  PetscCall(PetscMemzero(C, sizeof(*C)));
  /* CHOLMOD uses column alignment, SBAIJ stores the upper factor, so we pass it on as a lower factor, swapping the meaning of row and column */
  C->nrow  = (size_t)A->cmap->n;
  C->ncol  = (size_t)A->rmap->n;
  C->nzmax = (size_t)sbaij->maxnz;
  C->p     = sbaij->i; /* SBAIJ row starts become CHOLMOD column pointers */
  C->i     = sbaij->j; /* SBAIJ column indices become CHOLMOD row indices */
  if (values) {
#if defined(PETSC_USE_COMPLEX)
    /* we need to pass CHOLMOD the conjugate matrix */
    PetscScalar *v;
    PetscInt     i;

    PetscCall(PetscMalloc1(sbaij->maxnz, &v));
    for (i = 0; i < sbaij->maxnz; i++) v[i] = PetscConj(sbaij->a[i]);
    C->x     = v;
    vallocin = PETSC_TRUE;
#else
    C->x = sbaij->a; /* real case: share the value array directly */
#endif
  }
  C->stype  = -1; /* symmetric, lower triangle stored (after the row/column swap above) */
  C->itype  = CHOLMOD_INT_TYPE;
  C->xtype  = values ? CHOLMOD_SCALAR_TYPE : CHOLMOD_PATTERN;
  C->dtype  = CHOLMOD_DOUBLE;
  C->sorted = 1;
  C->packed = 1;
  *aijalloc = PETSC_FALSE;
  *valloc   = vallocin;
  PetscFunctionReturn(PETSC_SUCCESS);
}
176 
/* Access modes for the {Vec,MatDense}{Wrap,UnWrap}Cholmod helpers below */
#define GET_ARRAY_READ  0
#define GET_ARRAY_WRITE 1
179 
180 PetscErrorCode VecWrapCholmod(Vec X, PetscInt rw, cholmod_dense *Y)
181 {
182   PetscScalar *x;
183   PetscInt     n;
184 
185   PetscFunctionBegin;
186   PetscCall(PetscMemzero(Y, sizeof(*Y)));
187   switch (rw) {
188   case GET_ARRAY_READ:
189     PetscCall(VecGetArrayRead(X, (const PetscScalar **)&x));
190     break;
191   case GET_ARRAY_WRITE:
192     PetscCall(VecGetArrayWrite(X, &x));
193     break;
194   default:
195     SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Case %" PetscInt_FMT " not handled", rw);
196     break;
197   }
198   PetscCall(VecGetSize(X, &n));
199 
200   Y->x     = x;
201   Y->nrow  = n;
202   Y->ncol  = 1;
203   Y->nzmax = n;
204   Y->d     = n;
205   Y->xtype = CHOLMOD_SCALAR_TYPE;
206   Y->dtype = CHOLMOD_DOUBLE;
207   PetscFunctionReturn(PETSC_SUCCESS);
208 }
209 
210 PetscErrorCode VecUnWrapCholmod(Vec X, PetscInt rw, cholmod_dense *Y)
211 {
212   PetscFunctionBegin;
213   switch (rw) {
214   case GET_ARRAY_READ:
215     PetscCall(VecRestoreArrayRead(X, (const PetscScalar **)&Y->x));
216     break;
217   case GET_ARRAY_WRITE:
218     PetscCall(VecRestoreArrayWrite(X, (PetscScalar **)&Y->x));
219     break;
220   default:
221     SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Case %" PetscInt_FMT " not handled", rw);
222     break;
223   }
224   PetscFunctionReturn(PETSC_SUCCESS);
225 }
226 
227 PetscErrorCode MatDenseWrapCholmod(Mat X, PetscInt rw, cholmod_dense *Y)
228 {
229   PetscScalar *x;
230   PetscInt     m, n, lda;
231 
232   PetscFunctionBegin;
233   PetscCall(PetscMemzero(Y, sizeof(*Y)));
234   switch (rw) {
235   case GET_ARRAY_READ:
236     PetscCall(MatDenseGetArrayRead(X, (const PetscScalar **)&x));
237     break;
238   case GET_ARRAY_WRITE:
239     PetscCall(MatDenseGetArrayWrite(X, &x));
240     break;
241   default:
242     SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Case %" PetscInt_FMT " not handled", rw);
243     break;
244   }
245   PetscCall(MatDenseGetLDA(X, &lda));
246   PetscCall(MatGetLocalSize(X, &m, &n));
247 
248   Y->x     = x;
249   Y->nrow  = m;
250   Y->ncol  = n;
251   Y->nzmax = lda * n;
252   Y->d     = lda;
253   Y->xtype = CHOLMOD_SCALAR_TYPE;
254   Y->dtype = CHOLMOD_DOUBLE;
255   PetscFunctionReturn(PETSC_SUCCESS);
256 }
257 
258 PetscErrorCode MatDenseUnWrapCholmod(Mat X, PetscInt rw, cholmod_dense *Y)
259 {
260   PetscFunctionBegin;
261   switch (rw) {
262   case GET_ARRAY_READ:
263     PetscCall(MatDenseRestoreArrayRead(X, (const PetscScalar **)&Y->x));
264     break;
265   case GET_ARRAY_WRITE:
266     /* we don't have MatDenseRestoreArrayWrite */
267     PetscCall(MatDenseRestoreArray(X, (PetscScalar **)&Y->x));
268     break;
269   default:
270     SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Case %" PetscInt_FMT " not handled", rw);
271     break;
272   }
273   PetscFunctionReturn(PETSC_SUCCESS);
274 }
275 
/*
   Destroy the CHOLMOD factor wrapper: free any QR and Cholesky factors,
   shut down the CHOLMOD common block with the matching integer flavor,
   and release the PETSc-side allocations and composed methods.
*/
PETSC_INTERN PetscErrorCode MatDestroy_CHOLMOD(Mat F)
{
  Mat_CHOLMOD *chol = (Mat_CHOLMOD *)F->data;

  PetscFunctionBegin;
  if (chol->spqrfact) PetscCallExternal(!SuiteSparseQR_C_free, &chol->spqrfact, chol->common);
  if (chol->factor) PetscCallExternal(!cholmod_X_free_factor, &chol->factor, chol->common);
  /* finish with the same int/long flavor the common block was started with */
  if (chol->common->itype == CHOLMOD_INT) {
    PetscCallExternal(!cholmod_finish, chol->common);
  } else {
    PetscCallExternal(!cholmod_l_finish, chol->common);
  }
  PetscCall(PetscFree(chol->common));
  PetscCall(PetscFree(chol->matrix));
  PetscCall(PetscObjectComposeFunction((PetscObject)F, "MatFactorGetSolverType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)F, "MatQRFactorSymbolic_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)F, "MatQRFactorNumeric_C", NULL));
  PetscCall(PetscFree(F->data));
  PetscFunctionReturn(PETSC_SUCCESS);
}
296 
297 static PetscErrorCode MatSolve_CHOLMOD(Mat, Vec, Vec);
298 static PetscErrorCode MatMatSolve_CHOLMOD(Mat, Mat, Mat);
299 
300 /*static const char *const CholmodOrderingMethods[] = {"User","AMD","METIS","NESDIS(default)","Natural","NESDIS(small=20000)","NESDIS(small=4,no constrained)","NESDIS()"};*/
301 
/*
   Print the CHOLMOD run parameters and statistics from the common block.
   Only active once the factor is usable (i.e. MatSolve_CHOLMOD has been
   installed by a successful numeric factorization).
*/
static PetscErrorCode MatView_Info_CHOLMOD(Mat F, PetscViewer viewer)
{
  Mat_CHOLMOD          *chol = (Mat_CHOLMOD *)F->data;
  const cholmod_common *c    = chol->common;
  PetscInt              i;

  PetscFunctionBegin;
  if (F->ops->solve != MatSolve_CHOLMOD) PetscFunctionReturn(PETSC_SUCCESS);
  PetscCall(PetscViewerASCIIPrintf(viewer, "CHOLMOD run parameters:\n"));
  PetscCall(PetscViewerASCIIPushTab(viewer));
  PetscCall(PetscViewerASCIIPrintf(viewer, "Pack factors after symbolic factorization: %s\n", chol->pack ? "TRUE" : "FALSE"));
  PetscCall(PetscViewerASCIIPrintf(viewer, "Common.dbound            %g  (Smallest absolute value of diagonal entries of D)\n", c->dbound));
  PetscCall(PetscViewerASCIIPrintf(viewer, "Common.grow0             %g\n", c->grow0));
  PetscCall(PetscViewerASCIIPrintf(viewer, "Common.grow1             %g\n", c->grow1));
  PetscCall(PetscViewerASCIIPrintf(viewer, "Common.grow2             %u\n", (unsigned)c->grow2));
  PetscCall(PetscViewerASCIIPrintf(viewer, "Common.maxrank           %u\n", (unsigned)c->maxrank));
  PetscCall(PetscViewerASCIIPrintf(viewer, "Common.supernodal_switch %g\n", c->supernodal_switch));
  PetscCall(PetscViewerASCIIPrintf(viewer, "Common.supernodal        %d\n", c->supernodal));
  PetscCall(PetscViewerASCIIPrintf(viewer, "Common.final_asis        %d\n", c->final_asis));
  PetscCall(PetscViewerASCIIPrintf(viewer, "Common.final_super       %d\n", c->final_super));
  PetscCall(PetscViewerASCIIPrintf(viewer, "Common.final_ll          %d\n", c->final_ll));
  PetscCall(PetscViewerASCIIPrintf(viewer, "Common.final_pack        %d\n", c->final_pack));
  PetscCall(PetscViewerASCIIPrintf(viewer, "Common.final_monotonic   %d\n", c->final_monotonic));
  PetscCall(PetscViewerASCIIPrintf(viewer, "Common.final_resymbol    %d\n", c->final_resymbol));
  PetscCall(PetscViewerASCIIPrintf(viewer, "Common.zrelax            [%g,%g,%g]\n", c->zrelax[0], c->zrelax[1], c->zrelax[2]));
  PetscCall(PetscViewerASCIIPrintf(viewer, "Common.nrelax            [%u,%u,%u]\n", (unsigned)c->nrelax[0], (unsigned)c->nrelax[1], (unsigned)c->nrelax[2]));
  PetscCall(PetscViewerASCIIPrintf(viewer, "Common.prefer_upper      %d\n", c->prefer_upper));
  PetscCall(PetscViewerASCIIPrintf(viewer, "Common.print             %d\n", c->print));
  /* per-ordering-method statistics, flagging the one CHOLMOD selected */
  for (i = 0; i < c->nmethods; i++) {
    PetscCall(PetscViewerASCIIPrintf(viewer, "Ordering method %" PetscInt_FMT "%s:\n", i, i == c->selected ? " [SELECTED]" : ""));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  lnz %g, fl %g, prune_dense %g, prune_dense2 %g\n", c->method[i].lnz, c->method[i].fl, c->method[i].prune_dense, c->method[i].prune_dense2));
  }
  PetscCall(PetscViewerASCIIPrintf(viewer, "Common.postorder         %d\n", c->postorder));
  PetscCall(PetscViewerASCIIPrintf(viewer, "Common.default_nesdis    %d (use NESDIS instead of METIS for nested dissection)\n", c->default_nesdis));
  /* Statistics */
  PetscCall(PetscViewerASCIIPrintf(viewer, "Common.fl                %g (flop count from most recent analysis)\n", c->fl));
  PetscCall(PetscViewerASCIIPrintf(viewer, "Common.lnz               %g (fundamental nz in L)\n", c->lnz));
  PetscCall(PetscViewerASCIIPrintf(viewer, "Common.anz               %g\n", c->anz));
  PetscCall(PetscViewerASCIIPrintf(viewer, "Common.modfl             %g (flop count from most recent update)\n", c->modfl));
  PetscCall(PetscViewerASCIIPrintf(viewer, "Common.malloc_count      %g (number of live objects)\n", (double)c->malloc_count));
  PetscCall(PetscViewerASCIIPrintf(viewer, "Common.memory_usage      %g (peak memory usage in bytes)\n", (double)c->memory_usage));
  PetscCall(PetscViewerASCIIPrintf(viewer, "Common.memory_inuse      %g (current memory usage in bytes)\n", (double)c->memory_inuse));
  PetscCall(PetscViewerASCIIPrintf(viewer, "Common.nrealloc_col      %g (number of column reallocations)\n", c->nrealloc_col));
  PetscCall(PetscViewerASCIIPrintf(viewer, "Common.nrealloc_factor   %g (number of factor reallocations due to column reallocations)\n", c->nrealloc_factor));
  PetscCall(PetscViewerASCIIPrintf(viewer, "Common.ndbounds_hit      %g (number of times diagonal was modified by dbound)\n", c->ndbounds_hit));
  PetscCall(PetscViewerASCIIPrintf(viewer, "Common.rowfacfl          %g (number of flops in last call to cholmod_rowfac)\n", c->rowfacfl));
  PetscCall(PetscViewerASCIIPrintf(viewer, "Common.aatfl             %g (number of flops to compute A(:,f)*A(:,f)')\n", c->aatfl));
#if defined(PETSC_USE_SUITESPARSE_GPU)
  PetscCall(PetscViewerASCIIPrintf(viewer, "Common.useGPU            %d\n", c->useGPU));
#endif
  PetscCall(PetscViewerASCIIPopTab(viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}
355 
356 PETSC_INTERN PetscErrorCode MatView_CHOLMOD(Mat F, PetscViewer viewer)
357 {
358   PetscBool         iascii;
359   PetscViewerFormat format;
360 
361   PetscFunctionBegin;
362   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
363   if (iascii) {
364     PetscCall(PetscViewerGetFormat(viewer, &format));
365     if (format == PETSC_VIEWER_ASCII_INFO) PetscCall(MatView_Info_CHOLMOD(F, viewer));
366   }
367   PetscFunctionReturn(PETSC_SUCCESS);
368 }
369 
/*
   Solve F x = b for a single right-hand side using the computed factor.
   B and X are wrapped as cholmod_dense headers (no copy); cholmod_X_solve2
   may allocate workspace (Y_handle/E_handle) which is freed immediately
   after the solve. Also registered as solvetranspose (see the vtable setup
   in MatCholeskyFactorNumeric_CHOLMOD).
*/
static PetscErrorCode MatSolve_CHOLMOD(Mat F, Vec B, Vec X)
{
  Mat_CHOLMOD  *chol = (Mat_CHOLMOD *)F->data;
  cholmod_dense cholB, cholX, *X_handle, *Y_handle = NULL, *E_handle = NULL;

  PetscFunctionBegin;
  static_F = F; /* context for CholmodErrorHandler; not reentrant */
  PetscCall(VecWrapCholmod(B, GET_ARRAY_READ, &cholB));
  PetscCall(VecWrapCholmod(X, GET_ARRAY_WRITE, &cholX));
  X_handle = &cholX; /* pass our storage so CHOLMOD writes the solution in place */
  PetscCallExternal(!cholmod_X_solve2, CHOLMOD_A, chol->factor, &cholB, NULL, &X_handle, NULL, &Y_handle, &E_handle, chol->common);
  PetscCallExternal(!cholmod_X_free_dense, &Y_handle, chol->common);
  PetscCallExternal(!cholmod_X_free_dense, &E_handle, chol->common);
  PetscCall(VecUnWrapCholmod(B, GET_ARRAY_READ, &cholB));
  PetscCall(VecUnWrapCholmod(X, GET_ARRAY_WRITE, &cholX));
  PetscCall(PetscLogFlops(4.0 * chol->common->lnz)); /* flop estimate for the triangular solves */
  PetscFunctionReturn(PETSC_SUCCESS);
}
388 
/*
   Solve F X = B for multiple right-hand sides stored as dense matrices.
   Mirrors MatSolve_CHOLMOD but wraps dense matrices instead of vectors;
   workspace allocated by cholmod_X_solve2 is freed immediately.
*/
static PetscErrorCode MatMatSolve_CHOLMOD(Mat F, Mat B, Mat X)
{
  Mat_CHOLMOD  *chol = (Mat_CHOLMOD *)F->data;
  cholmod_dense cholB, cholX, *X_handle, *Y_handle = NULL, *E_handle = NULL;

  PetscFunctionBegin;
  static_F = F; /* context for CholmodErrorHandler; not reentrant */
  PetscCall(MatDenseWrapCholmod(B, GET_ARRAY_READ, &cholB));
  PetscCall(MatDenseWrapCholmod(X, GET_ARRAY_WRITE, &cholX));
  X_handle = &cholX; /* pass our storage so CHOLMOD writes the solution in place */
  PetscCallExternal(!cholmod_X_solve2, CHOLMOD_A, chol->factor, &cholB, NULL, &X_handle, NULL, &Y_handle, &E_handle, chol->common);
  PetscCallExternal(!cholmod_X_free_dense, &Y_handle, chol->common);
  PetscCallExternal(!cholmod_X_free_dense, &E_handle, chol->common);
  PetscCall(MatDenseUnWrapCholmod(B, GET_ARRAY_READ, &cholB));
  PetscCall(MatDenseUnWrapCholmod(X, GET_ARRAY_WRITE, &cholX));
  PetscCall(PetscLogFlops(4.0 * B->cmap->n * chol->common->lnz)); /* per-RHS solve estimate times number of columns */
  PetscFunctionReturn(PETSC_SUCCESS);
}
407 
/*
   Numeric Cholesky factorization of A into the factor produced by the
   earlier symbolic stage. On success, installs the solve operations on F;
   solvetranspose maps to the same routines as solve.
*/
static PetscErrorCode MatCholeskyFactorNumeric_CHOLMOD(Mat F, Mat A, const MatFactorInfo *info)
{
  Mat_CHOLMOD   *chol = (Mat_CHOLMOD *)F->data;
  cholmod_sparse cholA;
  PetscBool      aijalloc, valloc;
  int            err;

  PetscFunctionBegin;
  PetscCall((*chol->Wrap)(A, PETSC_TRUE, &cholA, &aijalloc, &valloc)); /* wrap with values */
  static_F = F;                                                       /* context for CholmodErrorHandler; not reentrant */
  err      = !cholmod_X_factorize(&cholA, chol->factor, chol->common);
  PetscCheck(!err, PetscObjectComm((PetscObject)F), PETSC_ERR_LIB, "CHOLMOD factorization failed with status %d", chol->common->status);
  /* loss of positive definiteness is reported via status, checked separately from the return code */
  PetscCheck(chol->common->status != CHOLMOD_NOT_POSDEF, PetscObjectComm((PetscObject)F), PETSC_ERR_MAT_CH_ZRPVT, "CHOLMOD detected that the matrix is not positive definite, failure at column %u", (unsigned)chol->factor->minor);

  PetscCall(PetscLogFlops(chol->common->fl));
  if (aijalloc) PetscCall(PetscFree2(cholA.p, cholA.i));
  if (valloc) PetscCall(PetscFree(cholA.x));
#if defined(PETSC_USE_SUITESPARSE_GPU)
  PetscCall(PetscLogGpuTimeAdd(chol->common->CHOLMOD_GPU_GEMM_TIME + chol->common->CHOLMOD_GPU_SYRK_TIME + chol->common->CHOLMOD_GPU_TRSM_TIME + chol->common->CHOLMOD_GPU_POTRF_TIME));
#endif

  F->ops->solve             = MatSolve_CHOLMOD;
  F->ops->solvetranspose    = MatSolve_CHOLMOD;
  F->ops->matsolve          = MatMatSolve_CHOLMOD;
  F->ops->matsolvetranspose = MatMatSolve_CHOLMOD;
  PetscFunctionReturn(PETSC_SUCCESS);
}
435 
436 PETSC_INTERN PetscErrorCode MatCholeskyFactorSymbolic_CHOLMOD(Mat F, Mat A, IS perm, const MatFactorInfo *info)
437 {
438   Mat_CHOLMOD   *chol = (Mat_CHOLMOD *)F->data;
439   int            err;
440   cholmod_sparse cholA;
441   PetscBool      aijalloc, valloc;
442   PetscInt      *fset  = 0;
443   size_t         fsize = 0;
444 
445   PetscFunctionBegin;
446   /* Set options to F */
447   PetscCall(CholmodSetOptions(F));
448 
449   PetscCall((*chol->Wrap)(A, PETSC_FALSE, &cholA, &aijalloc, &valloc));
450   static_F = F;
451   if (chol->factor) {
452     err = !cholmod_X_resymbol(&cholA, fset, fsize, (int)chol->pack, chol->factor, chol->common);
453     PetscCheck(!err, PetscObjectComm((PetscObject)F), PETSC_ERR_LIB, "CHOLMOD analysis failed with status %d", chol->common->status);
454   } else if (perm) {
455     const PetscInt *ip;
456     PetscCall(ISGetIndices(perm, &ip));
457     chol->factor = cholmod_X_analyze_p(&cholA, (PetscInt *)ip, fset, fsize, chol->common);
458     PetscCheck(chol->factor, PetscObjectComm((PetscObject)F), PETSC_ERR_LIB, "CHOLMOD analysis failed using PETSc ordering with status %d", chol->common->status);
459     PetscCall(ISRestoreIndices(perm, &ip));
460   } else {
461     chol->factor = cholmod_X_analyze(&cholA, chol->common);
462     PetscCheck(chol->factor, PetscObjectComm((PetscObject)F), PETSC_ERR_LIB, "CHOLMOD analysis failed using internal ordering with status %d", chol->common->status);
463   }
464 
465   if (aijalloc) PetscCall(PetscFree2(cholA.p, cholA.i));
466   if (valloc) PetscCall(PetscFree(cholA.x));
467 
468   F->ops->choleskyfactornumeric = MatCholeskyFactorNumeric_CHOLMOD;
469   PetscFunctionReturn(PETSC_SUCCESS);
470 }
471 
/* Report the solver package (MATSOLVERCHOLMOD) backing this factor matrix. */
static PetscErrorCode MatFactorGetSolverType_seqsbaij_cholmod(Mat A, MatSolverType *type)
{
  PetscFunctionBegin;
  *type = MATSOLVERCHOLMOD;
  PetscFunctionReturn(PETSC_SUCCESS);
}
478 
479 PETSC_INTERN PetscErrorCode MatGetInfo_CHOLMOD(Mat F, MatInfoType flag, MatInfo *info)
480 {
481   Mat_CHOLMOD *chol = (Mat_CHOLMOD *)F->data;
482 
483   PetscFunctionBegin;
484   info->block_size        = 1.0;
485   info->nz_allocated      = chol->common->lnz;
486   info->nz_used           = chol->common->lnz;
487   info->nz_unneeded       = 0.0;
488   info->assemblies        = 0.0;
489   info->mallocs           = 0.0;
490   info->memory            = chol->common->memory_inuse;
491   info->fill_ratio_given  = 0;
492   info->fill_ratio_needed = 0;
493   info->factor_mallocs    = chol->common->malloc_count;
494   PetscFunctionReturn(PETSC_SUCCESS);
495 }
496 
497 /*MC
498   MATSOLVERCHOLMOD
499 
500   A matrix type providing direct solvers (Cholesky) for sequential matrices
501   via the external package CHOLMOD.
502 
503   Use `./configure --download-suitesparse` to install PETSc to use CHOLMOD
504 
505   Use `-pc_type cholesky` `-pc_factor_mat_solver_type cholmod` to use this direct solver
506 
507   Consult CHOLMOD documentation for more information about the common parameters
508   which correspond to the options database keys below.
509 
510   Options Database Keys:
511 + -mat_cholmod_dbound <0>          - Minimum absolute value of diagonal entries of D (None)
512 . -mat_cholmod_grow0 <1.2>         - Global growth ratio when factors are modified (None)
513 . -mat_cholmod_grow1 <1.2>         - Column growth ratio when factors are modified (None)
514 . -mat_cholmod_grow2 <5>           - Affine column growth constant when factors are modified (None)
515 . -mat_cholmod_maxrank <8>         - Max rank of update, larger values are faster but use more memory [2,4,8] (None)
516 . -mat_cholmod_factor <AUTO>       - (choose one of) `SIMPLICIAL`, `AUTO`, `SUPERNODAL`
517 . -mat_cholmod_supernodal_switch <40> - flop/nnz_L threshold for switching to supernodal factorization (None)
518 . -mat_cholmod_final_asis <TRUE>   - Leave factors "as is" (None)
519 . -mat_cholmod_final_pack <TRUE>   - Pack the columns when finished (use FALSE if the factors will be updated later) (None)
520 . -mat_cholmod_zrelax <0.8>        - 3 real supernodal relaxed amalgamation parameters (None)
521 . -mat_cholmod_nrelax <4>          - 3 size_t supernodal relaxed amalgamation parameters (None)
522 . -mat_cholmod_prefer_upper <TRUE> - Work with upper triangular form (faster when using fill-reducing ordering, slower in natural ordering) (None)
523 . -mat_cholmod_print <3>           - Verbosity level (None)
524 - -mat_ordering_type internal      - Use the ordering provided by Cholmod
525 
526    Level: beginner
527 
528    Note:
529    CHOLMOD is part of SuiteSparse http://faculty.cse.tamu.edu/davis/suitesparse.html
530 
531 .seealso: [](ch_matrices), `Mat`, `PCCHOLESKY`, `PCFactorSetMatSolverType()`, `MatSolverType`
532 M*/
533 
/*
   Create a CHOLMOD Cholesky factor matrix F for a SeqSBAIJ matrix A.
   Returns *F = NULL without raising an error when CHOLMOD cannot handle A:
   block size != 1, or (complex build) A not marked Hermitian.
*/
PETSC_INTERN PetscErrorCode MatGetFactor_seqsbaij_cholmod(Mat A, MatFactorType ftype, Mat *F)
{
  Mat          B;
  Mat_CHOLMOD *chol;
  PetscInt     m = A->rmap->n, n = A->cmap->n, bs;

  PetscFunctionBegin;
  PetscCall(MatGetBlockSize(A, &bs));
  *F = NULL;
  if (bs != 1) {
    PetscCall(PetscInfo(A, "CHOLMOD only supports block size=1.\n"));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
#if defined(PETSC_USE_COMPLEX)
  if (A->hermitian != PETSC_BOOL3_TRUE) {
    PetscCall(PetscInfo(A, "Only for Hermitian matrices.\n"));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
#endif
  /* Create the factorization matrix F */
  PetscCall(MatCreate(PetscObjectComm((PetscObject)A), &B));
  PetscCall(MatSetSizes(B, PETSC_DECIDE, PETSC_DECIDE, m, n));
  PetscCall(PetscStrallocpy("cholmod", &((PetscObject)B)->type_name));
  PetscCall(MatSetUp(B));
  PetscCall(PetscNew(&chol));

  chol->Wrap = MatWrapCholmod_seqsbaij; /* how to present matrices of this type to CHOLMOD */
  B->data    = chol;

  B->ops->getinfo                = MatGetInfo_CHOLMOD;
  B->ops->view                   = MatView_CHOLMOD;
  B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_CHOLMOD;
  B->ops->destroy                = MatDestroy_CHOLMOD;
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatFactorGetSolverType_C", MatFactorGetSolverType_seqsbaij_cholmod));
  B->factortype   = MAT_FACTOR_CHOLESKY;
  B->assembled    = PETSC_TRUE;
  B->preallocated = PETSC_TRUE;

  PetscCall(CholmodStart(B)); /* allocate and initialize the cholmod_common block */

  PetscCall(PetscFree(B->solvertype));
  PetscCall(PetscStrallocpy(MATSOLVERCHOLMOD, &B->solvertype));
  B->canuseordering = PETSC_TRUE;
  PetscCall(PetscStrallocpy(MATORDERINGEXTERNAL, (char **)&B->preferredordering[MAT_FACTOR_CHOLESKY]));
  *F = B;
  PetscFunctionReturn(PETSC_SUCCESS);
}
581