static char help[] = "Reads a PETSc matrix and vector from a file and solves a linear system.\n\
This version first preloads and solves a small system, then loads \n\
another (larger) system and solves it as well. This example illustrates\n\
preloading of instructions with the smaller system so that more accurate\n\
performance monitoring can be done with the larger one (that actually\n\
is the system of interest). See the 'Performance Hints' chapter of the\n\
users manual for a discussion of preloading. Input parameters include\n\
  -f0 <input_file> : first file to load (small system)\n\n\
  -f1 <input_file> : second file to load (larger system)\n\n\
  -nearnulldim <0> : number of vectors in the near-null space immediately following matrix\n\n\
  -trans : solve transpose system instead\n\n";
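
/*
   A sample preloading run might look like the following (a sketch only; the two
   data file names are placeholders for binary files supplied by the user):

      mpiexec -n 2 ./ex72 -f0 small_system.dat -f1 large_system.dat -ksp_type gmres -pc_type bjacobi -log_view
*/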
/*
   This code can be used to test the PETSc interface to other packages.
   Examples of command line options:
      ./ex72 -f0 <datafile> -ksp_type preonly
             -help -ksp_view
             -num_numfac <num_numfac> -num_rhs <num_rhs>
             -ksp_type preonly -pc_type lu -pc_factor_mat_solver_type superlu or superlu_dist or mumps
             -ksp_type preonly -pc_type cholesky -pc_factor_mat_solver_type mumps
      mpiexec -n <np> ./ex72 -f0 <datafile> -ksp_type cg -pc_type asm -pc_asm_type basic -sub_pc_type icc -mat_type sbaij
*/
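
/*
   The binary files read by this example can be produced with MatView()/VecView()
   on a binary viewer. A minimal sketch (assuming an already assembled Mat A and
   Vec b; "system.dat" is a placeholder file name):

      PetscViewer vout;
      PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, "system.dat", FILE_MODE_WRITE, &vout));
      PetscCall(MatView(A, vout));   // matrix is written first
      PetscCall(VecView(b, vout));   // right-hand side follows the matrix
      PetscCall(PetscViewerDestroy(&vout));
*/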

/*
  Include "petscksp.h" so that we can use KSP solvers. Note that this file
  automatically includes:
     petscsys.h    - base PETSc routines      petscvec.h - vectors
     petscmat.h    - matrices
     petscis.h     - index sets               petscksp.h - Krylov subspace methods
     petscviewer.h - viewers                  petscpc.h  - preconditioners
*/
#include <petscksp.h>

int main(int argc, char **args)
{
  KSP         ksp;                         /* linear solver context */
  Mat         A;                           /* matrix */
  Vec         x, b, u;                     /* approx solution, RHS, exact solution */
  PetscViewer viewer;                      /* viewer */
  char        file[4][PETSC_MAX_PATH_LEN]; /* input file name */
  PetscBool   table = PETSC_FALSE, flg, trans = PETSC_FALSE, initialguess = PETSC_FALSE;
  PetscBool   outputSoln = PETSC_FALSE, constantnullspace = PETSC_FALSE;
  PetscInt    its, num_numfac, m, n, M, p, nearnulldim = 0;
  PetscReal   norm;
  PetscBool   preload = PETSC_TRUE, isSymmetric, cknorm = PETSC_FALSE, initialguessfile = PETSC_FALSE;
  PetscMPIInt rank;
  char        initialguessfilename[PETSC_MAX_PATH_LEN];
  char        mtype[PETSC_MAX_PATH_LEN];

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &args, NULL, help));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-table", &table, NULL));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-constantnullspace", &constantnullspace, NULL));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-trans", &trans, NULL));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-initialguess", &initialguess, NULL));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-output_solution", &outputSoln, NULL));
  PetscCall(PetscOptionsGetString(NULL, NULL, "-initialguessfilename", initialguessfilename, sizeof(initialguessfilename), &initialguessfile));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-nearnulldim", &nearnulldim, NULL));

  /*
     Determine files from which we read the two linear systems
     (matrix and right-hand-side vector).
  */
  PetscCall(PetscOptionsGetString(NULL, NULL, "-f", file[0], sizeof(file[0]), &flg));
  if (flg) {
    PetscCall(PetscStrncpy(file[1], file[0], sizeof(file[1])));
    preload = PETSC_FALSE;
  } else {
    PetscCall(PetscOptionsGetString(NULL, NULL, "-f0", file[0], sizeof(file[0]), &flg));
    PetscCheck(flg, PETSC_COMM_WORLD, PETSC_ERR_USER_INPUT, "Must indicate binary file with the -f0 or -f option");
    PetscCall(PetscOptionsGetString(NULL, NULL, "-f1", file[1], sizeof(file[1]), &flg));
    if (!flg) preload = PETSC_FALSE; /* don't bother with second system */
  }

  /* -----------------------------------------------------------
                  Beginning of linear solver loop
     ----------------------------------------------------------- */
  /*
     Loop through the linear solve 2 times.
      - The intention here is to preload and solve a small system;
        then load another (larger) system and solve it as well.
        This process preloads the instructions with the smaller
        system so that more accurate performance monitoring (via
        -log_view) can be done with the larger one (that actually
        is the system of interest).
  */
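  /*
     Roughly speaking, PetscPreLoadBegin(preload,"Load system") opens a loop in
     which PetscPreLoadIt is 0 on the first (preload) pass and, when preload is
     PETSC_TRUE, 1 on the second (timed) pass; the macros also manage the logging
     stages so that -log_view reports on the pass of interest. PetscPreLoadStage()
     below switches to a new stage, and PetscPreLoadEnd() near the bottom of
     main() closes the loop.
  */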
  PetscPreLoadBegin(preload, "Load system");

  /* - - - - - - - - - - - New Stage - - - - - - - - - - - - -
                         Load system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Open binary file. Note that we use FILE_MODE_READ to indicate
     reading from this file.
  */
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, file[PetscPreLoadIt], FILE_MODE_READ, &viewer));

  /*
     Load the matrix and vector; then destroy the viewer.
  */
  PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
  PetscCall(MatSetFromOptions(A));
  PetscCall(MatLoad(A, viewer));

  PetscCall(PetscOptionsGetString(NULL, NULL, "-mat_convert_type", mtype, sizeof(mtype), &flg));
  if (flg) PetscCall(MatConvert(A, mtype, MAT_INPLACE_MATRIX, &A));

  if (nearnulldim) {
    MatNullSpace nullsp;
    Vec         *nullvecs;
    PetscInt     i;
    PetscCall(PetscMalloc1(nearnulldim, &nullvecs));
    for (i = 0; i < nearnulldim; i++) {
      PetscCall(VecCreate(PETSC_COMM_WORLD, &nullvecs[i]));
      PetscCall(VecLoad(nullvecs[i], viewer));
    }
    PetscCall(MatNullSpaceCreate(PETSC_COMM_WORLD, PETSC_FALSE, nearnulldim, nullvecs, &nullsp));
    PetscCall(MatSetNearNullSpace(A, nullsp));
    for (i = 0; i < nearnulldim; i++) PetscCall(VecDestroy(&nullvecs[i]));
    PetscCall(PetscFree(nullvecs));
    PetscCall(MatNullSpaceDestroy(&nullsp));
  }
  if (constantnullspace) {
    MatNullSpace constant;
    PetscCall(MatNullSpaceCreate(PETSC_COMM_WORLD, PETSC_TRUE, 0, NULL, &constant));
    PetscCall(MatSetNullSpace(A, constant));
    PetscCall(MatNullSpaceDestroy(&constant));
  }
  flg = PETSC_FALSE;
  PetscCall(PetscOptionsGetString(NULL, NULL, "-rhs", file[2], sizeof(file[2]), &flg));
  PetscCall(VecCreate(PETSC_COMM_WORLD, &b));
  if (flg) { /* rhs is stored in a separate file */
    if (file[2][0] == '0' || file[2][0] == 0) {
      PetscInt    m;
      PetscScalar one = 1.0;
      PetscCall(PetscInfo(0, "Using vector of ones for RHS\n"));
      PetscCall(MatGetLocalSize(A, &m, NULL));
      PetscCall(VecSetSizes(b, m, PETSC_DECIDE));
      PetscCall(VecSetFromOptions(b));
      PetscCall(VecSet(b, one));
    } else {
      PetscCall(PetscViewerDestroy(&viewer));
      PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, file[2], FILE_MODE_READ, &viewer));
      PetscCall(VecSetFromOptions(b));
      PetscCall(VecLoad(b, viewer));
    }
  } else { /* rhs is stored in the same file as matrix */
    PetscCall(VecSetFromOptions(b));
    PetscCall(VecLoad(b, viewer));
  }
  PetscCall(PetscViewerDestroy(&viewer));

  /* Make A singular for testing zero-pivot of ilu factorization */
  /* Example: ./ex72 -f0 <datafile> -test_zeropivot -pc_factor_shift_type <shift_type> */
  flg = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-test_zeropivot", &flg, NULL));
  if (flg) { /* set a row as zeros */
    PetscInt row = 0;
    PetscCall(MatSetOption(A, MAT_KEEP_NONZERO_PATTERN, PETSC_TRUE));
    PetscCall(MatZeroRows(A, 1, &row, 0.0, NULL, NULL));
  }

  /* Check whether A is symmetric, then set A->symmetric option */
  flg = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-check_symmetry", &flg, NULL));
  if (flg) {
    PetscCall(MatIsSymmetric(A, 0.0, &isSymmetric));
    if (!isSymmetric) PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Warning: A is non-symmetric \n"));
  }

  /*
     If the loaded matrix is larger than the vector (due to being padded
     to match the block size of the system), then create a new padded vector.
  */

  PetscCall(MatGetLocalSize(A, NULL, &n));
  PetscCall(MatGetSize(A, &M, NULL));
  PetscCall(VecGetSize(b, &m));
  PetscCall(VecGetLocalSize(b, &p));
  preload = (PetscBool)(M != m || p != n); /* Global or local dimension mismatch */
  PetscCallMPI(MPIU_Allreduce(&preload, &flg, 1, MPIU_BOOL, MPI_LOR, PetscObjectComm((PetscObject)A)));
  if (flg) { /* Create a new vector b by padding the old one */
    PetscInt     j, mvec, start, end, indx;
    Vec          tmp;
    PetscScalar *bold;

    PetscCall(VecCreate(PETSC_COMM_WORLD, &tmp));
    PetscCall(VecSetSizes(tmp, n, PETSC_DECIDE));
    PetscCall(VecSetFromOptions(tmp));
    PetscCall(VecGetOwnershipRange(b, &start, &end));
    PetscCall(VecGetLocalSize(b, &mvec));
    PetscCall(VecGetArray(b, &bold));
    for (j = 0; j < mvec; j++) {
      indx = start + j;
      PetscCall(VecSetValues(tmp, 1, &indx, bold + j, INSERT_VALUES));
    }
    PetscCall(VecRestoreArray(b, &bold));
    PetscCall(VecDestroy(&b));
    PetscCall(VecAssemblyBegin(tmp));
    PetscCall(VecAssemblyEnd(tmp));
    b = tmp;
  }

  PetscCall(MatCreateVecs(A, &x, NULL));
  PetscCall(VecDuplicate(b, &u));
  if (initialguessfile) {
    PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, initialguessfilename, FILE_MODE_READ, &viewer));
    PetscCall(VecLoad(x, viewer));
    PetscCall(PetscViewerDestroy(&viewer));
    initialguess = PETSC_TRUE;
  } else if (initialguess) {
    PetscCall(VecSet(x, 1.0));
  } else {
    PetscCall(VecSet(x, 0.0));
  }

  /* Check scaling in A */
  flg = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-check_scaling", &flg, NULL));
  if (flg) {
    Vec       max, min, l1;
    PetscInt  idx;
    PetscReal val;

    PetscCall(VecDuplicate(x, &max));
    PetscCall(VecDuplicate(x, &min));
    PetscCall(VecDuplicate(x, &l1));
    PetscCall(MatGetRowMaxAbs(A, max, NULL));
    PetscCall(MatGetRowMinAbs(A, min, NULL));
    PetscCall(MatGetRowSumAbs(A, l1));
    if (PETSC_FALSE) {
      PetscCall(PetscViewerASCIIOpen(PETSC_COMM_WORLD, "max.data", &viewer));
      PetscCall(VecView(max, viewer));
      PetscCall(PetscViewerDestroy(&viewer));
      PetscCall(PetscViewerASCIIOpen(PETSC_COMM_WORLD, "min.data", &viewer));
      PetscCall(VecView(min, viewer));
      PetscCall(PetscViewerDestroy(&viewer));
      PetscCall(PetscViewerASCIIOpen(PETSC_COMM_WORLD, "l1.data", &viewer));
      PetscCall(VecView(l1, viewer));
      PetscCall(PetscViewerDestroy(&viewer));
    }
    PetscCall(VecView(max, PETSC_VIEWER_DRAW_WORLD));
    PetscCall(VecMax(max, &idx, &val));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Largest max row element %g at row %" PetscInt_FMT "\n", (double)val, idx));
    PetscCall(VecView(min, PETSC_VIEWER_DRAW_WORLD));
    PetscCall(VecMin(min, &idx, &val));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Smallest min row element %g at row %" PetscInt_FMT "\n", (double)val, idx));
    PetscCall(VecMin(max, &idx, &val));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Smallest max row element %g at row %" PetscInt_FMT "\n", (double)val, idx));
    PetscCall(VecPointwiseDivide(max, max, min));
    PetscCall(VecMax(max, &idx, &val));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Largest row ratio %g at row %" PetscInt_FMT "\n", (double)val, idx));
    PetscCall(VecView(max, PETSC_VIEWER_DRAW_WORLD));
    PetscCall(VecMax(max, &idx, &val));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Largest row ratio %g at row %" PetscInt_FMT "\n", (double)val, idx));
    PetscCall(VecView(max, PETSC_VIEWER_DRAW_WORLD));
    PetscCall(VecView(max, PETSC_VIEWER_DRAW_WORLD));
    PetscCall(VecMax(l1, &idx, &val));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Largest l1 row element %g at row %" PetscInt_FMT "\n", (double)val, idx));
    PetscCall(VecDestroy(&max));
    PetscCall(VecDestroy(&min));
    PetscCall(VecDestroy(&l1));
  }

  /* PetscCall(MatView(A,PETSC_VIEWER_STDOUT_WORLD)); */
  /* - - - - - - - - - - - New Stage - - - - - - - - - - - - -
                      Setup solve for system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Conclude profiling last stage; begin profiling next stage.
  */
  PetscPreLoadStage("KSPSetUpSolve");

  /*
     Create linear solver; set operators; set runtime options.
  */
  PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));
  PetscCall(KSPSetInitialGuessNonzero(ksp, initialguess));
  num_numfac = 1;
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-num_numfac", &num_numfac, NULL));
  while (num_numfac--) {
    PC        pc;
    PetscBool lsqr, isbddc, ismatis;
    char      str[32];

    PetscCall(PetscOptionsGetString(NULL, NULL, "-ksp_type", str, sizeof(str), &lsqr));
    if (lsqr) PetscCall(PetscStrcmp("lsqr", str, &lsqr));
    if (lsqr) {
      Mat BtB;
      PetscCall(MatTransposeMatMult(A, A, MAT_INITIAL_MATRIX, 4, &BtB));
      PetscCall(KSPSetOperators(ksp, A, BtB));
      PetscCall(MatDestroy(&BtB));
    } else {
      PetscCall(KSPSetOperators(ksp, A, A));
    }
    PetscCall(KSPSetFromOptions(ksp));

    /* if we test BDDC, make sure pmat is of type MATIS */
    PetscCall(KSPGetPC(ksp, &pc));
    PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCBDDC, &isbddc));
    PetscCall(PetscObjectTypeCompare((PetscObject)A, MATIS, &ismatis));
    if (isbddc && !ismatis) {
      Mat J;

      PetscCall(MatConvert(A, MATIS, MAT_INITIAL_MATRIX, &J));
      PetscCall(KSPSetOperators(ksp, A, J));
      PetscCall(MatDestroy(&J));
    }

    /*
       Here we explicitly call KSPSetUp() and KSPSetUpOnBlocks() to
       enable more precise profiling of setting up the preconditioner.
       These calls are optional, since both will be called within
       KSPSolve() if they haven't been called already.
    */
    PetscCall(KSPSetUp(ksp));
    PetscCall(KSPSetUpOnBlocks(ksp));

    /* - - - - - - - - - - - New Stage - - - - - - - - - - - - -
                           Solve system
       - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

    /*
       Solve linear system;
    */
    if (trans) {
      PetscCall(KSPSolveTranspose(ksp, b, x));
      PetscCall(KSPGetIterationNumber(ksp, &its));
    } else {
      PetscInt num_rhs = 1;
      PetscCall(PetscOptionsGetInt(NULL, NULL, "-num_rhs", &num_rhs, NULL));
      cknorm = PETSC_FALSE;
      PetscCall(PetscOptionsGetBool(NULL, NULL, "-cknorm", &cknorm, NULL));
      while (num_rhs--) {
        if (num_rhs == 1) PetscCall(VecSet(x, 0.0));
        PetscCall(KSPSolve(ksp, b, x));
      }
      PetscCall(KSPGetIterationNumber(ksp, &its));
      if (cknorm) { /* Check error for each rhs */
        if (trans) {
          PetscCall(MatMultTranspose(A, x, u));
        } else {
          PetscCall(MatMult(A, x, u));
        }
        PetscCall(VecAXPY(u, -1.0, b));
        PetscCall(VecNorm(u, NORM_2, &norm));
        PetscCall(PetscPrintf(PETSC_COMM_WORLD, " Number of iterations = %3" PetscInt_FMT "\n", its));
        if (!PetscIsNanReal(norm)) {
          if (norm < 1.e-12) {
            PetscCall(PetscPrintf(PETSC_COMM_WORLD, " Residual norm < 1.e-12\n"));
          } else {
            PetscCall(PetscPrintf(PETSC_COMM_WORLD, " Residual norm %g\n", (double)norm));
          }
        }
      }
    } /* while (num_rhs--) */

    /* - - - - - - - - - - - New Stage - - - - - - - - - - - - -
          Check error, print output, free data structures.
       - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

    /*
       Check error
    */
    if (trans) {
      PetscCall(MatMultTranspose(A, x, u));
    } else {
      PetscCall(MatMult(A, x, u));
    }
    PetscCall(VecAXPY(u, -1.0, b));
    PetscCall(VecNorm(u, NORM_2, &norm));
    /*
       Write output (optionally using table for solver details).
        - PetscPrintf() handles output for multiprocessor jobs
          by printing from only one processor in the communicator.
        - KSPView() prints information about the linear solver.
    */
    if (table) {
      char *matrixname = NULL, kspinfo[120];

      /*
         Open a string viewer; then write info to it.
      */
      PetscCall(PetscViewerStringOpen(PETSC_COMM_WORLD, kspinfo, sizeof(kspinfo), &viewer));
      PetscCall(KSPView(ksp, viewer));
      PetscCall(PetscStrrchr(file[PetscPreLoadIt], '/', &matrixname));
      PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%-8.8s %3" PetscInt_FMT " %2.0e %s \n", matrixname, its, (double)norm, kspinfo));

      /*
         Destroy the viewer
      */
      PetscCall(PetscViewerDestroy(&viewer));
    } else {
      PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Number of iterations = %3" PetscInt_FMT "\n", its));
      if (!PetscIsNanReal(norm)) {
        if (norm < 1.e-12) {
          PetscCall(PetscPrintf(PETSC_COMM_WORLD, " Residual norm < 1.e-12\n"));
        } else {
          PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Residual norm %g\n", (double)norm));
        }
      }
    }
    PetscCall(PetscOptionsGetString(NULL, NULL, "-solution", file[3], sizeof(file[3]), &flg));
    if (flg) {
      Vec       xstar;
      PetscReal norm;

      PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, file[3], FILE_MODE_READ, &viewer));
      PetscCall(VecCreate(PETSC_COMM_WORLD, &xstar));
      PetscCall(VecLoad(xstar, viewer));
      PetscCall(VecAXPY(xstar, -1.0, x));
      PetscCall(VecNorm(xstar, NORM_2, &norm));
      PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Error norm %g\n", (double)norm));
      PetscCall(VecDestroy(&xstar));
      PetscCall(PetscViewerDestroy(&viewer));
    }
    if (outputSoln) {
      PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, "solution.petsc", FILE_MODE_WRITE, &viewer));
      PetscCall(VecView(x, viewer));
      PetscCall(PetscViewerDestroy(&viewer));
    }

    flg = PETSC_FALSE;
    PetscCall(PetscOptionsGetBool(NULL, NULL, "-ksp_reason", &flg, NULL));
    if (flg) {
      KSPConvergedReason reason;
      PetscCall(KSPGetConvergedReason(ksp, &reason));
      PetscCall(PetscPrintf(PETSC_COMM_WORLD, "KSPConvergedReason: %s\n", KSPConvergedReasons[reason]));
    }

  } /* while (num_numfac--) */

  /*
     Free work space. All PETSc objects should be destroyed when they
     are no longer needed.
  */
  PetscCall(MatDestroy(&A));
  PetscCall(VecDestroy(&b));
  PetscCall(VecDestroy(&u));
  PetscCall(VecDestroy(&x));
  PetscCall(KSPDestroy(&ksp));
  PetscPreLoadEnd();
  /* -----------------------------------------------------------
                      End of linear solver loop
     ----------------------------------------------------------- */

  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   build:
      requires: !complex

   testset:
      suffix: 1
      nsize: 2
      args: -f0 ${wPETSC_DIR}/share/petsc/datafiles/matrices/spd-real-int@PETSC_INDEX_SIZE@-float@PETSC_SCALAR_SIZE@
      requires: !__float128

   testset:
      suffix: 1a
      args: -f0 ${wPETSC_DIR}/share/petsc/datafiles/matrices/spd-real-int@PETSC_INDEX_SIZE@-float@PETSC_SCALAR_SIZE@
      requires: !__float128

   testset:
      nsize: 2
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
      args: -f0 ${DATAFILESPATH}/matrices/medium
      args: -ksp_type bicg
      test:
         suffix: 2

   testset:
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
      args: -f0 ${DATAFILESPATH}/matrices/medium
      args: -ksp_type bicg
      test:
         suffix: 4
         args: -pc_type lu
      test:
         suffix: 5

   testset:
      suffix: 6
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
      args: -f0 ${DATAFILESPATH}/matrices/fem1
      args: -pc_factor_levels 2 -pc_factor_fill 1.73 -ksp_gmres_cgs_refinement_type refine_always

   testset:
      TODO: Matrix row/column sizes are not compatible with block size
      suffix: 7
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
      args: -f0 ${DATAFILESPATH}/matrices/medium
      args: -viewer_binary_skip_info -mat_type seqbaij
      args: -matload_block_size {{2 3 4 5 6 7 8}separate output}
      args: -ksp_max_it 100 -ksp_gmres_cgs_refinement_type refine_always
      args: -ksp_rtol 1.0e-15 -ksp_monitor_short
      test:
         suffix: a
      test:
         suffix: b
         args: -pc_factor_mat_ordering_type nd
      test:
         suffix: c
         args: -pc_factor_levels 1
      test:
         requires: metis
         suffix: d
         args: -pc_factor_mat_ordering_type metisnd

   testset:
      TODO: Matrix row/column sizes are not compatible with block size
      suffix: 7_d
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
      args: -f0 ${DATAFILESPATH}/matrices/medium
      args: -viewer_binary_skip_info -mat_type seqbaij
      args: -matload_block_size {{2 3 4 5 6 7 8}shared output}
      args: -ksp_type preonly -pc_type lu

   testset:
      suffix: 8
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
      args: -f0 ${DATAFILESPATH}/matrices/medium
      args: -ksp_diagonal_scale -pc_type eisenstat -ksp_monitor_short -ksp_diagonal_scale_fix -ksp_gmres_cgs_refinement_type refine_always -mat_no_inode

   testset:
      TODO: Matrix row/column sizes are not compatible with block size
      suffix: 9
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
      args: -f0 ${DATAFILESPATH}/matrices/medium
      args: -viewer_binary_skip_info -matload_block_size {{1 2 3 4 5 6 7}separate output} -ksp_max_it 100 -ksp_gmres_cgs_refinement_type refine_always -ksp_rtol 1.0e-15 -ksp_monitor_short
      test:
         suffix: a
         args: -mat_type seqbaij
      test:
         suffix: b
         args: -mat_type seqbaij -trans
      test:
         suffix: c
         nsize: 2
         args: -mat_type mpibaij
      test:
         suffix: d
         nsize: 2
         args: -mat_type mpibaij -trans
      test:
         suffix: e
         nsize: 3
         args: -mat_type mpibaij
      test:
         suffix: f
         nsize: 3
         args: -mat_type mpibaij -trans

   testset:
      suffix: 10
      nsize: 2
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
      args: -ksp_type fgmres -pc_type ksp -f0 ${DATAFILESPATH}/matrices/medium -ksp_fgmres_modifypcksp -ksp_monitor_short

   testset:
      suffix: 12
      requires: datafilespath matlab
      args: -pc_type lu -pc_factor_mat_solver_type matlab -f0 ${DATAFILESPATH}/matrices/arco1

   testset:
      suffix: 13
      requires: datafilespath lusol
      args: -f0 ${DATAFILESPATH}/matrices/arco1
      args: -mat_type lusol -pc_type lu

   testset:
      nsize: 3
      args: -f0 ${DATAFILESPATH}/matrices/medium
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
      test:
         suffix: 14
         requires: spai
         args: -pc_type spai
      test:
         suffix: 15
         requires: hypre !defined(PETSC_HAVE_HYPRE_DEVICE)
         args: -pc_type hypre -pc_hypre_type pilut
      test:
         suffix: 16
         requires: hypre !defined(PETSC_HAVE_HYPRE_DEVICE)
         args: -pc_type hypre -pc_hypre_type parasails
      test:
         suffix: 17
         requires: hypre !defined(PETSC_HAVE_HYPRE_DEVICE)
         args: -pc_type hypre -pc_hypre_type boomeramg
      test:
         suffix: 18
         requires: hypre !defined(PETSC_HAVE_HYPRE_DEVICE)
         args: -pc_type hypre -pc_hypre_type euclid
      test:
         suffix: 20
         requires: hypre !defined(PETSC_HAVE_HYPRE_DEVICE)
         args: -pc_type hypre -pc_hypre_type ilu

   testset:
      suffix: 19
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
      args: -f0 ${DATAFILESPATH}/matrices/poisson1
      args: -ksp_type cg -pc_type icc
      args: -pc_factor_levels {{0 2 4}separate output}
      test:
      test:
         args: -mat_type seqsbaij

   testset:
      suffix: ILU
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
      args: -f0 ${DATAFILESPATH}/matrices/small
      args: -pc_factor_levels 1
      test:
      test:
         # This is tested against regular ILU (used to be denoted ILUBAIJ)
         args: -mat_type baij

   testset:
      suffix: aijcusparse
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) cuda
      args: -f0 ${DATAFILESPATH}/matrices/medium -ksp_monitor_short -ksp_view -mat_view ascii::ascii_info -mat_type aijcusparse -pc_factor_mat_solver_type cusparse -pc_type ilu -vec_type cuda

   testset:
      TODO: No output file. Need to determine if deprecated
      suffix: asm_viennacl
      nsize: 2
      requires: viennacl
      args: -pc_type asm -pc_asm_sub_mat_type aijviennacl -f0 ${wPETSC_DIR}/share/petsc/datafiles/matrices/spd-real-int${PETSC_INDEX_SIZE}-float${PETSC_SCALAR_SIZE}

   testset:
      nsize: 2
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) hypre !defined(PETSC_HAVE_HYPRE_DEVICE)
      args: -f0 ${DATAFILESPATH}/matrices/poisson2.gz -ksp_monitor_short -ksp_rtol 1.E-9 -pc_type hypre -pc_hypre_type boomeramg
      test:
         suffix: boomeramg_euclid
         args: -pc_hypre_boomeramg_smooth_type Euclid -pc_hypre_boomeramg_smooth_num_levels 2 -pc_hypre_boomeramg_eu_level 1 -pc_hypre_boomeramg_eu_droptolerance 0.01
      test:
         suffix: boomeramg_euclid_bj
         args: -pc_hypre_boomeramg_smooth_type Euclid -pc_hypre_boomeramg_smooth_num_levels 2 -pc_hypre_boomeramg_eu_level 1 -pc_hypre_boomeramg_eu_droptolerance 0.01 -pc_hypre_boomeramg_eu_bj
      test:
         suffix: boomeramg_parasails
         args: -pc_hypre_boomeramg_smooth_type ParaSails -pc_hypre_boomeramg_smooth_num_levels 2
      test:
         suffix: boomeramg_pilut
         args: -pc_hypre_boomeramg_smooth_type Pilut -pc_hypre_boomeramg_smooth_num_levels 2
      test:
         suffix: boomeramg_schwarz
         args: -pc_hypre_boomeramg_smooth_type Schwarz-smoothers

   testset:
      suffix: cg_singlereduction
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
      args: -f0 ${DATAFILESPATH}/matrices/small
      args: -mat_type mpisbaij -ksp_type cg -pc_type eisenstat -ksp_monitor_short -ksp_converged_reason
      test:
      test:
         args: -ksp_cg_single_reduction

   testset:
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
      args: -f0 ${DATAFILESPATH}/matrices/poisson2.gz
      args: -ksp_monitor_short -pc_type icc
      test:
         suffix: cr
         args: -ksp_type cr
      test:
         suffix: lcd
         args: -ksp_type lcd

   testset:
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
      args: -f0 ${DATAFILESPATH}/matrices/small
      args: -ksp_monitor_short -ksp_view -mat_view ascii::ascii_info
      test:
         suffix: seqaijcrl
         args: -mat_type seqaijcrl
      test:
         suffix: seqaijperm
         args: -mat_type seqaijperm

   testset:
      nsize: 2
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
      args: -f0 ${DATAFILESPATH}/matrices/small
      args: -ksp_monitor_short -ksp_view
      # Different output files
      test:
         suffix: mpiaijcrl
         args: -mat_type mpiaijcrl
      test:
         suffix: mpiaijperm
         args: -mat_type mpiaijperm

   testset:
      nsize: 4
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) !defined(PETSC_HAVE_I_MPI)
      args: -ksp_monitor_short -ksp_view
      test:
         suffix: xxt
         args: -f0 ${DATAFILESPATH}/matrices/poisson1 -check_symmetry -check_scaling -ksp_type cg -pc_type tfs
      test:
         suffix: xyt
         args: -f0 ${DATAFILESPATH}/matrices/medium -ksp_type gmres -pc_type tfs

   testset:
      # The output file here is the same as mumps
      suffix: mumps_cholesky
      output_file: output/ex72_mumps.out
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) mumps
      args: -f0 ${DATAFILESPATH}/matrices/small -ksp_type preonly -pc_type cholesky -pc_factor_mat_solver_type mumps -num_numfac 2 -num_rhs 2
      nsize: {{1 2}}
      test:
         args: -mat_type sbaij -mat_ignore_lower_triangular
      test:
         args: -mat_type aij
      test:
         args: -mat_type aij -matload_spd

   testset:
      # The output file here is the same as mumps
      suffix: mumps_lu
      output_file: output/ex72_mumps.out
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) mumps
      args: -f0 ${DATAFILESPATH}/matrices/small -ksp_type preonly -pc_type lu -pc_factor_mat_solver_type mumps -num_numfac 2 -num_rhs 2
      test:
         args: -mat_type seqaij
      test:
         nsize: 2
         args: -mat_type mpiaij
      test:
         args: -mat_type seqbaij -matload_block_size 2
      test:
         nsize: 2
         args: -mat_type mpibaij -matload_block_size 2
      test:
         args: -mat_type aij -mat_mumps_icntl_7 5
         TODO: Need to determine if deprecated

   test:
      suffix: mumps_lu_parmetis
      output_file: output/ex72_mumps.out
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) mumps parmetis
      nsize: 2
      args: -f0 ${DATAFILESPATH}/matrices/small -ksp_type preonly -pc_type lu -pc_factor_mat_solver_type mumps -num_numfac 2 -num_rhs 2 -mat_type mpiaij -mat_mumps_icntl_28 2 -mat_mumps_icntl_29 2

   test:
      suffix: mumps_lu_ptscotch
      output_file: output/ex72_mumps.out
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) mumps ptscotch
      nsize: 2
      args: -f0 ${DATAFILESPATH}/matrices/small -ksp_type preonly -pc_type lu -pc_factor_mat_solver_type mumps -num_numfac 2 -num_rhs 2 -mat_type mpiaij -mat_mumps_icntl_28 2 -mat_mumps_icntl_29 1

   testset:
      # The output file here is the same as mumps
      suffix: mumps_redundant
      output_file: output/ex72_mumps_redundant.out
      nsize: 8
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) mumps
      args: -f0 ${DATAFILESPATH}/matrices/medium -ksp_type preonly -pc_type redundant -pc_redundant_number {{8 7 6 5 4 3 2 1}} -redundant_pc_factor_mat_solver_type mumps -num_numfac 2 -num_rhs 2

   testset:
      suffix: pastix_cholesky
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) pastix
      output_file: output/ex72_mumps.out
      nsize: {{1 2}}
      args: -f0 ${DATAFILESPATH}/matrices/small -ksp_type preonly -pc_factor_mat_solver_type pastix -num_numfac 2 -num_rhs 2 -pc_type cholesky -mat_type sbaij -mat_ignore_lower_triangular -mat_pastix_thread_nbr 1

   testset:
      suffix: pastix_lu
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) pastix
      args: -f0 ${DATAFILESPATH}/matrices/small -ksp_type preonly -pc_type lu -pc_factor_mat_solver_type pastix -num_numfac 2 -num_rhs 2 -mat_pastix_thread_nbr 1
      output_file: output/ex72_mumps.out
      test:
         args: -mat_type seqaij
      test:
         nsize: 2
         args: -mat_type mpiaij

   testset:
      suffix: pastix_redundant
      output_file: output/ex72_mumps_redundant.out
      nsize: 8
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) pastix
      args: -f0 ${DATAFILESPATH}/matrices/medium -ksp_type preonly -pc_type redundant -pc_redundant_number {{8 7 6 5 4 3 2 1}} -redundant_pc_factor_mat_solver_type pastix -num_numfac 2 -num_rhs 2 -mat_pastix_thread_nbr 1

   testset:
      suffix: superlu_dist_lu
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) superlu_dist
      output_file: output/ex72_mumps.out
      args: -f0 ${DATAFILESPATH}/matrices/small -ksp_type preonly -pc_type lu -pc_factor_mat_solver_type superlu_dist -num_numfac 2 -num_rhs 2
      nsize: {{1 2}}

   testset:
      suffix: superlu_dist_redundant
      nsize: 8
      output_file: output/ex72_mumps_redundant.out
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) superlu_dist
      args: -f0 ${DATAFILESPATH}/matrices/medium -ksp_type preonly -pc_type redundant -pc_redundant_number {{8 7 6 5 4 3 2 1}} -redundant_pc_factor_mat_solver_type superlu_dist -num_numfac 2 -num_rhs 2

   testset:
      suffix: superlu_lu
      output_file: output/ex72_mumps.out
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) superlu
      args: -f0 ${DATAFILESPATH}/matrices/small -ksp_type preonly -pc_type lu -pc_factor_mat_solver_type superlu -num_numfac 2 -num_rhs 2

   testset:
      suffix: umfpack
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) suitesparse
      args: -f0 ${DATAFILESPATH}/matrices/small -ksp_type preonly -pc_type lu -mat_type seqaij -pc_factor_mat_solver_type umfpack -num_numfac 2 -num_rhs 2

   testset:
      suffix: zeropivot
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
      args: -f0 ${DATAFILESPATH}/matrices/small -test_zeropivot -ksp_converged_reason -ksp_type fgmres -pc_type ksp -fp_trap 0
      test:
         nsize: 3
         args: -ksp_pc_type bjacobi
      test:
         nsize: 2
         args: -ksp_ksp_type cg -ksp_pc_type bjacobi -ksp_pc_bjacobi_blocks 1
      #test:
      #   nsize: 3
      #   args: -ksp_ksp_converged_reason -ksp_pc_type bjacobi -ksp_sub_ksp_converged_reason
      #   TODO: Need to determine if deprecated

   testset:
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
      args: -mat_convert_type is -f0 ${DATAFILESPATH}/matrices/medium -ksp_type fgmres
      test:
         suffix: aij_gdsw
         nsize: 4
         args: -mat_convert_type aij -pc_type mg -pc_mg_levels 2 -pc_mg_adapt_interp_coarse_space gdsw -pc_mg_galerkin -mg_levels_pc_type asm
      test:
         output_file: output/ex72_aij_gdsw.out
         suffix: is_gdsw
         nsize: 4
         args: -pc_type mg -pc_mg_levels 2 -pc_mg_adapt_interp_coarse_space gdsw -pc_mg_galerkin -mg_levels_pc_type asm
      test:
         suffix: is_asm
         nsize: {{1 2}separate output}
         args: -pc_type asm
      test:
         suffix: bddc_seq
         nsize: 1
         args: -pc_type bddc
      test:
         suffix: bddc_par
         nsize: 2
         args: -pc_type bddc
      test:
         requires: parmetis
         suffix: bddc_par_nd_parmetis
         filter: sed -e "s/Number of iterations = [0-9]/Number of iterations = 9/g"
         nsize: 4
         args: -ksp_error_if_not_converged -pc_type bddc -mat_is_disassemble_l2g_type nd -mat_partitioning_type parmetis
      test:
         requires: ptscotch defined(PETSC_HAVE_SCOTCH_PARMETIS_V3_NODEND)
         suffix: bddc_par_nd_ptscotch
         filter: sed -e "s/Number of iterations = [0-9]/Number of iterations = 9/g"
         nsize: 4
         args: -ksp_error_if_not_converged -pc_type bddc -mat_is_disassemble_l2g_type nd -mat_partitioning_type ptscotch

   testset:
      requires: !__float128 hpddm slepc defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
      test:
         suffix: hpddm_mat
         output_file: output/ex72_bddc_seq.out
         filter: sed -e "s/Number of iterations = 2/Number of iterations = 1/g"
         nsize: 2
         args: -f0 ${wPETSC_DIR}/share/petsc/datafiles/matrices/spd-real-int@PETSC_INDEX_SIZE@-float@PETSC_SCALAR_SIZE@ -pc_type hpddm -pc_hpddm_define_subdomains -pc_hpddm_levels_1_sub_pc_type cholesky -pc_hpddm_levels_1_eps_nev 5 -pc_hpddm_levels_1_st_pc_type mat
      test:
         requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
         suffix: hpddm_gen_non_hermitian
         output_file: output/ex72_2.out
         nsize: 4
         args: -f0 ${DATAFILESPATH}/matrices/arco1 -pc_type hpddm -pc_hpddm_define_subdomains -pc_hpddm_levels_1_sub_pc_type lu -pc_hpddm_levels_1_eps_nev 10 -pc_hpddm_levels_1_st_share_sub_ksp -pc_hpddm_levels_1_eps_gen_non_hermitian -pc_hpddm_coarse_mat_type baij -pc_hpddm_block_splitting -pc_hpddm_levels_1_eps_threshold_absolute 0.7 -ksp_pc_side right
      test:
         requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) mumps !defined(PETSCTEST_VALGRIND)
         suffix: hpddm_gen_non_hermitian_baij
         output_file: output/ex72_10.out
         nsize: 4
         timeoutfactor: 2
         args: -f0 ${DATAFILESPATH}/matrices/arco6 -pc_type hpddm -pc_hpddm_define_subdomains -pc_hpddm_levels_1_sub_pc_type lu -pc_hpddm_levels_1_eps_nev 30 -pc_hpddm_levels_1_st_share_sub_ksp -pc_hpddm_levels_1_eps_gen_non_hermitian -pc_hpddm_coarse_mat_type baij -pc_hpddm_block_splitting -pc_hpddm_levels_1_eps_threshold_absolute 0.8 -ksp_pc_side right -mat_type baij -pc_hpddm_levels_1_sub_pc_factor_mat_solver_type mumps -pc_hpddm_levels_1_eps_tol 1.0e-2 -ksp_monitor_short
      test:
         requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
         suffix: hpddm_too_much_oversampling
         output_file: output/ex72_2.out
         nsize: 2
         filter: sed -e "s/Number of iterations = 3/Number of iterations = 10/g"
         args: -f0 ${DATAFILESPATH}/matrices/small -pc_type hpddm -pc_hpddm_levels_1_svd_nsv 1 -pc_hpddm_levels_1_st_share_sub_ksp -pc_hpddm_levels_1_sub_pc_type lu -pc_hpddm_harmonic_overlap 3 -pc_hpddm_coarse_mat_type aij

   # BDDC multiple subdomains per process tests
   test:
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
      suffix: matis_bddc_multisub_3d
      nsize: {{1 2 3 4 5 6 7 8}}
      args: -f ${DATAFILESPATH}/matrices/matis/poisson_DMDA_9x9x9_3x3x3.dat -pc_type bddc -ksp_type cg -ksp_norm_type natural -ksp_error_if_not_converged -mat_type is -pc_bddc_use_faces

   test:
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
      suffix: matis_bddc_multisub_2d
      nsize: {{1 2 3 4 5 6 7 8}}
      args: -f ${DATAFILESPATH}/matrices/matis/poisson_DMDA_9x9_3x3.dat -pc_type bddc -ksp_type cg -ksp_norm_type natural -ksp_error_if_not_converged -mat_type is

   test:
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
      suffix: matis_bddc_multisub_plex_2d
      nsize: {{1 2 3 4 5 6 7 8}}
      args: -f ${DATAFILESPATH}/matrices/matis/poisson_DMPLEX_32x32_16.dat -pc_type bddc -ksp_type cg -ksp_norm_type natural -ksp_error_if_not_converged -mat_type is

   test:
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
      suffix: matis_bddc_multisub_plex_3d
      nsize: {{1 2 3 4 5 6 7 8}}
      args: -f ${DATAFILESPATH}/matrices/matis/poisson_DMPLEX_16x16x16_16.dat -pc_type bddc -ksp_type cg -ksp_norm_type natural -ksp_error_if_not_converged -mat_type is

   test:
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
      suffix: matis_bddc_multisub_plex_3d_deluxe
      nsize: {{1 2 3 4 5 6 7 8}}
      args: -f ${DATAFILESPATH}/matrices/matis/poisson_DMPLEX_16x16x16_16.dat -pc_type bddc -ksp_type cg -ksp_norm_type natural -ksp_error_if_not_converged -mat_type is -pc_bddc_use_deluxe_scaling

   test:
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
      suffix: matis_bddc_multisub_hcurl_2d
      nsize: {{1 2 3 4 5 6 7 8}}
      args: -f ${DATAFILESPATH}/matrices/matis/hcurl_mfem_amrquad2_16.dat -pc_bddc_load ${DATAFILESPATH}/matrices/matis/bddc_hcurl_mfem_amrquad2_16.dat -pc_type bddc -ksp_type cg -ksp_norm_type natural -ksp_error_if_not_converged -mat_type is -pc_bddc_local_mat_graph_square 1

   test:
      requires: datafilespath double mumps !defined(PETSC_USE_64BIT_INDICES)
      suffix: matis_bddc_multisub_hcurl_2d_adaptive
      nsize: {{1 2 3 4 5 6 7 8}}
      args: -f ${DATAFILESPATH}/matrices/matis/hcurl_mfem_amrquad2_16.dat -pc_bddc_load ${DATAFILESPATH}/matrices/matis/bddc_hcurl_mfem_amrquad2_16.dat -pc_type bddc -ksp_type cg -ksp_norm_type natural -ksp_error_if_not_converged -mat_type is -pc_bddc_local_mat_graph_square 1 -pc_bddc_use_deluxe_scaling -pc_bddc_adaptive_threshold 2 -pc_bddc_schur_exact {{0 1}}

   test:
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
      suffix: matis_bddc_multisub_hdiv_3d
      nsize: {{1 2 3 4 5 6 7 8}}
      args: -f ${DATAFILESPATH}/matrices/matis/hdiv_mfem_inlinehex2_16.dat -pc_bddc_load ${DATAFILESPATH}/matrices/matis/bddc_hdiv_mfem_inlinehex2_16.dat -pc_type bddc -ksp_type cg -ksp_norm_type natural -ksp_error_if_not_converged -mat_type is -pc_bddc_use_local_mat_graph 0

   test:
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
      suffix: matis_bddc_multisub_hcurl_3d
      nsize: {{1 3 4 8}}
      args: -f ${DATAFILESPATH}/matrices/matis/hcurl_mfem_inlinehex_16.dat -pc_bddc_load ${DATAFILESPATH}/matrices/matis/bddc_hcurl_mfem_inlinehex_16.dat -pc_type bddc -ksp_type cg -ksp_norm_type natural -ksp_error_if_not_converged -mat_type is -pc_bddc_local_mat_graph_square 1

   test:
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
      suffix: matis_bddc_multisub_hcurl_3d_amr
      nsize: {{1 3 4 8}}
      args: -f ${DATAFILESPATH}/matrices/matis/hcurl_mfem_amrhex_16.dat -pc_bddc_load ${DATAFILESPATH}/matrices/matis/bddc_hcurl_mfem_amrhex_16.dat -pc_type bddc -ksp_type cg -ksp_norm_type natural -ksp_error_if_not_converged -mat_type is -pc_bddc_local_mat_graph_square 1

TEST*/