xref: /petsc/src/ksp/ksp/tutorials/ex72.c (revision 3fc23ce8277e00a65c4b1be6e2b4535b56ba7978)
1 
static char help[] = "Reads a PETSc matrix and vector from a file and solves a linear system.\n\
This version first preloads and solves a small system, then loads \n\
another (larger) system and solves it as well.  This example illustrates\n\
preloading of instructions with the smaller system so that more accurate\n\
performance monitoring can be done with the larger one (that actually\n\
is the system of interest).  See the 'Performance Hints' chapter of the\n\
users manual for a discussion of preloading.  Input parameters include\n\
  -f0 <input_file> : first file to load (small system)\n\
  -f1 <input_file> : second file to load (larger system)\n\n\
  -nearnulldim <0> : number of vectors in the near-null space immediately following matrix\n\n\
  -trans  : solve transpose system instead\n\n";
/*
  This code can be used to test the PETSc interface to other packages.
  Examples of command line options:
   ./ex72 -f0 <datafile> -ksp_type preonly
        -help -ksp_view
        -num_numfac <num_numfac> -num_rhs <num_rhs>
        -ksp_type preonly -pc_type lu -pc_factor_mat_solver_type superlu or superlu_dist or mumps
        -ksp_type preonly -pc_type cholesky -pc_factor_mat_solver_type mumps
   mpiexec -n <np> ./ex72 -f0 <datafile> -ksp_type cg -pc_type asm -pc_asm_type basic -sub_pc_type icc -mat_type sbaij
*/
24 
25 /*
26   Include "petscksp.h" so that we can use KSP solvers.  Note that this file
27   automatically includes:
28      petscsys.h       - base PETSc routines   petscvec.h - vectors
29      petscmat.h - matrices
30      petscis.h     - index sets            petscksp.h - Krylov subspace methods
31      petscviewer.h - viewers               petscpc.h  - preconditioners
32 */
33 #include <petscksp.h>
34 
35 int main(int argc,char **args)
36 {
37   KSP            ksp;             /* linear solver context */
38   Mat            A;               /* matrix */
39   Vec            x,b,u;           /* approx solution, RHS, exact solution */
40   PetscViewer    viewer;          /* viewer */
41   char           file[4][PETSC_MAX_PATH_LEN];     /* input file name */
42   PetscBool      table     =PETSC_FALSE,flg,trans=PETSC_FALSE,initialguess = PETSC_FALSE;
43   PetscBool      outputSoln=PETSC_FALSE,constantnullspace = PETSC_FALSE;
44   PetscInt       its,num_numfac,m,n,M,p,nearnulldim = 0;
45   PetscReal      norm;
46   PetscBool      preload=PETSC_TRUE,isSymmetric,cknorm=PETSC_FALSE,initialguessfile = PETSC_FALSE;
47   PetscMPIInt    rank;
48   char           initialguessfilename[PETSC_MAX_PATH_LEN];
49 
50   PetscCall(PetscInitialize(&argc,&args,(char*)0,help));
51   PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD,&rank));
52   PetscCall(PetscOptionsGetBool(NULL,NULL,"-table",&table,NULL));
53   PetscCall(PetscOptionsGetBool(NULL,NULL,"-constantnullspace",&constantnullspace,NULL));
54   PetscCall(PetscOptionsGetBool(NULL,NULL,"-trans",&trans,NULL));
55   PetscCall(PetscOptionsGetBool(NULL,NULL,"-initialguess",&initialguess,NULL));
56   PetscCall(PetscOptionsGetBool(NULL,NULL,"-output_solution",&outputSoln,NULL));
57   PetscCall(PetscOptionsGetString(NULL,NULL,"-initialguessfilename",initialguessfilename,sizeof(initialguessfilename),&initialguessfile));
58   PetscCall(PetscOptionsGetInt(NULL,NULL,"-nearnulldim",&nearnulldim,NULL));
59 
60   /*
61      Determine files from which we read the two linear systems
62      (matrix and right-hand-side vector).
63   */
64   PetscCall(PetscOptionsGetString(NULL,NULL,"-f",file[0],sizeof(file[0]),&flg));
65   if (flg) {
66     PetscCall(PetscStrcpy(file[1],file[0]));
67     preload = PETSC_FALSE;
68   } else {
69     PetscCall(PetscOptionsGetString(NULL,NULL,"-f0",file[0],sizeof(file[0]),&flg));
70     PetscCheck(flg,PETSC_COMM_WORLD,PETSC_ERR_USER_INPUT,"Must indicate binary file with the -f0 or -f option");
71     PetscCall(PetscOptionsGetString(NULL,NULL,"-f1",file[1],sizeof(file[1]),&flg));
72     if (!flg) preload = PETSC_FALSE;   /* don't bother with second system */
73   }
74 
75   /* -----------------------------------------------------------
76                   Beginning of linear solver loop
77      ----------------------------------------------------------- */
78   /*
79      Loop through the linear solve 2 times.
80       - The intention here is to preload and solve a small system;
81         then load another (larger) system and solve it as well.
82         This process preloads the instructions with the smaller
83         system so that more accurate performance monitoring (via
84         -log_view) can be done with the larger one (that actually
85         is the system of interest).
86   */
87   PetscPreLoadBegin(preload,"Load system");
88 
89   /* - - - - - - - - - - - New Stage - - - - - - - - - - - - -
90                          Load system
91    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
92 
93   /*
94      Open binary file.  Note that we use FILE_MODE_READ to indicate
95      reading from this file.
96   */
97   PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD,file[PetscPreLoadIt],FILE_MODE_READ,&viewer));
98 
99   /*
100      Load the matrix and vector; then destroy the viewer.
101   */
102   PetscCall(MatCreate(PETSC_COMM_WORLD,&A));
103   PetscCall(MatSetFromOptions(A));
104   PetscCall(MatLoad(A,viewer));
105   if (nearnulldim) {
106     MatNullSpace nullsp;
107     Vec          *nullvecs;
108     PetscInt     i;
109     PetscCall(PetscMalloc1(nearnulldim,&nullvecs));
110     for (i=0; i<nearnulldim; i++) {
111       PetscCall(VecCreate(PETSC_COMM_WORLD,&nullvecs[i]));
112       PetscCall(VecLoad(nullvecs[i],viewer));
113     }
114     PetscCall(MatNullSpaceCreate(PETSC_COMM_WORLD,PETSC_FALSE,nearnulldim,nullvecs,&nullsp));
115     PetscCall(MatSetNearNullSpace(A,nullsp));
116     for (i=0; i<nearnulldim; i++) PetscCall(VecDestroy(&nullvecs[i]));
117     PetscCall(PetscFree(nullvecs));
118     PetscCall(MatNullSpaceDestroy(&nullsp));
119   }
120   if (constantnullspace) {
121     MatNullSpace constant;
122     PetscCall(MatNullSpaceCreate(PETSC_COMM_WORLD,PETSC_TRUE,0,NULL,&constant));
123     PetscCall(MatSetNullSpace(A,constant));
124     PetscCall(MatNullSpaceDestroy(&constant));
125   }
126   flg  = PETSC_FALSE;
127   PetscCall(PetscOptionsGetString(NULL,NULL,"-rhs",file[2],sizeof(file[2]),&flg));
128   PetscCall(VecCreate(PETSC_COMM_WORLD,&b));
129   if (flg) {   /* rhs is stored in a separate file */
130     if (file[2][0] == '0' || file[2][0] == 0) {
131       PetscInt    m;
132       PetscScalar one = 1.0;
133       PetscCall(PetscInfo(0,"Using vector of ones for RHS\n"));
134       PetscCall(MatGetLocalSize(A,&m,NULL));
135       PetscCall(VecSetSizes(b,m,PETSC_DECIDE));
136       PetscCall(VecSetFromOptions(b));
137       PetscCall(VecSet(b,one));
138     } else {
139       PetscCall(PetscViewerDestroy(&viewer));
140       PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD,file[2],FILE_MODE_READ,&viewer));
141       PetscCall(VecSetFromOptions(b));
142       PetscCall(VecLoad(b,viewer));
143     }
144   } else {   /* rhs is stored in the same file as matrix */
145     PetscCall(VecSetFromOptions(b));
146     PetscCall(VecLoad(b,viewer));
147   }
148   PetscCall(PetscViewerDestroy(&viewer));
149 
150   /* Make A singular for testing zero-pivot of ilu factorization */
151   /* Example: ./ex72 -f0 <datafile> -test_zeropivot -pc_factor_shift_type <shift_type> */
152   flg  = PETSC_FALSE;
153   PetscCall(PetscOptionsGetBool(NULL,NULL, "-test_zeropivot", &flg,NULL));
154   if (flg) { /* set a row as zeros */
155     PetscInt          row=0;
156     PetscCall(MatSetOption(A,MAT_KEEP_NONZERO_PATTERN,PETSC_TRUE));
157     PetscCall(MatZeroRows(A,1,&row,0.0,NULL,NULL));
158   }
159 
160   /* Check whether A is symmetric, then set A->symmetric option */
161   flg = PETSC_FALSE;
162   PetscCall(PetscOptionsGetBool(NULL,NULL, "-check_symmetry", &flg,NULL));
163   if (flg) {
164     PetscCall(MatIsSymmetric(A,0.0,&isSymmetric));
165     if (!isSymmetric) {
166       PetscCall(PetscPrintf(PETSC_COMM_WORLD,"Warning: A is non-symmetric \n"));
167     }
168   }
169 
170   /*
171      If the loaded matrix is larger than the vector (due to being padded
172      to match the block size of the system), then create a new padded vector.
173   */
174 
175   PetscCall(MatGetLocalSize(A,NULL,&n));
176   PetscCall(MatGetSize(A,&M,NULL));
177   PetscCall(VecGetSize(b,&m));
178   PetscCall(VecGetLocalSize(b,&p));
179   preload = (PetscBool)(M != m || p != n); /* Global or local dimension mismatch */
180   PetscCall(MPIU_Allreduce(&preload,&flg,1,MPIU_BOOL,MPI_LOR,PetscObjectComm((PetscObject)A)));
181   if (flg) { /* Create a new vector b by padding the old one */
182     PetscInt    j,mvec,start,end,indx;
183     Vec         tmp;
184     PetscScalar *bold;
185 
186     PetscCall(VecCreate(PETSC_COMM_WORLD,&tmp));
187     PetscCall(VecSetSizes(tmp,n,PETSC_DECIDE));
188     PetscCall(VecSetFromOptions(tmp));
189     PetscCall(VecGetOwnershipRange(b,&start,&end));
190     PetscCall(VecGetLocalSize(b,&mvec));
191     PetscCall(VecGetArray(b,&bold));
192     for (j=0; j<mvec; j++) {
193       indx = start+j;
194       PetscCall(VecSetValues(tmp,1,&indx,bold+j,INSERT_VALUES));
195     }
196     PetscCall(VecRestoreArray(b,&bold));
197     PetscCall(VecDestroy(&b));
198     PetscCall(VecAssemblyBegin(tmp));
199     PetscCall(VecAssemblyEnd(tmp));
200     b    = tmp;
201   }
202 
203   PetscCall(MatCreateVecs(A,&x,NULL));
204   PetscCall(VecDuplicate(b,&u));
205   if (initialguessfile) {
206     PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD,initialguessfilename,FILE_MODE_READ,&viewer));
207     PetscCall(VecLoad(x,viewer));
208     PetscCall(PetscViewerDestroy(&viewer));
209     initialguess = PETSC_TRUE;
210   } else if (initialguess) {
211     PetscCall(VecSet(x,1.0));
212   } else {
213     PetscCall(VecSet(x,0.0));
214   }
215 
216   /* Check scaling in A */
217   flg  = PETSC_FALSE;
218   PetscCall(PetscOptionsGetBool(NULL,NULL, "-check_scaling", &flg,NULL));
219   if (flg) {
220     Vec       max, min;
221     PetscInt  idx;
222     PetscReal val;
223 
224     PetscCall(VecDuplicate(x, &max));
225     PetscCall(VecDuplicate(x, &min));
226     PetscCall(MatGetRowMaxAbs(A, max, NULL));
227     PetscCall(MatGetRowMinAbs(A, min, NULL));
228     {
229       PetscCall(PetscViewerASCIIOpen(PETSC_COMM_WORLD, "max.data", &viewer));
230       PetscCall(VecView(max, viewer));
231       PetscCall(PetscViewerDestroy(&viewer));
232       PetscCall(PetscViewerASCIIOpen(PETSC_COMM_WORLD, "min.data", &viewer));
233       PetscCall(VecView(min, viewer));
234       PetscCall(PetscViewerDestroy(&viewer));
235     }
236     PetscCall(VecView(max, PETSC_VIEWER_DRAW_WORLD));
237     PetscCall(VecMax(max, &idx, &val));
238     PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Largest max row element %g at row %" PetscInt_FMT "\n", (double)val, idx));
239     PetscCall(VecView(min, PETSC_VIEWER_DRAW_WORLD));
240     PetscCall(VecMin(min, &idx, &val));
241     PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Smallest min row element %g at row %" PetscInt_FMT "\n", (double)val, idx));
242     PetscCall(VecMin(max, &idx, &val));
243     PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Smallest max row element %g at row %" PetscInt_FMT "\n", (double)val, idx));
244     PetscCall(VecPointwiseDivide(max, max, min));
245     PetscCall(VecMax(max, &idx, &val));
246     PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Largest row ratio %g at row %" PetscInt_FMT "\n", (double)val, idx));
247     PetscCall(VecView(max, PETSC_VIEWER_DRAW_WORLD));
248     PetscCall(VecDestroy(&max));
249     PetscCall(VecDestroy(&min));
250   }
251 
252   /*  PetscCall(MatView(A,PETSC_VIEWER_STDOUT_WORLD)); */
253   /* - - - - - - - - - - - New Stage - - - - - - - - - - - - -
254                     Setup solve for system
255    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
256   /*
257      Conclude profiling last stage; begin profiling next stage.
258   */
259   PetscPreLoadStage("KSPSetUpSolve");
260 
261   /*
262      Create linear solver; set operators; set runtime options.
263   */
264   PetscCall(KSPCreate(PETSC_COMM_WORLD,&ksp));
265   PetscCall(KSPSetInitialGuessNonzero(ksp,initialguess));
266   num_numfac = 1;
267   PetscCall(PetscOptionsGetInt(NULL,NULL,"-num_numfac",&num_numfac,NULL));
268   while (num_numfac--) {
269     PC        pc;
270     PetscBool lsqr,isbddc,ismatis;
271     char      str[32];
272 
273     PetscCall(PetscOptionsGetString(NULL,NULL,"-ksp_type",str,sizeof(str),&lsqr));
274     if (lsqr) {
275       PetscCall(PetscStrcmp("lsqr",str,&lsqr));
276     }
277     if (lsqr) {
278       Mat BtB;
279       PetscCall(MatTransposeMatMult(A,A,MAT_INITIAL_MATRIX,4,&BtB));
280       PetscCall(KSPSetOperators(ksp,A,BtB));
281       PetscCall(MatDestroy(&BtB));
282     } else {
283       PetscCall(KSPSetOperators(ksp,A,A));
284     }
285     PetscCall(KSPSetFromOptions(ksp));
286 
287     /* if we test BDDC, make sure pmat is of type MATIS */
288     PetscCall(KSPGetPC(ksp,&pc));
289     PetscCall(PetscObjectTypeCompare((PetscObject)pc,PCBDDC,&isbddc));
290     PetscCall(PetscObjectTypeCompare((PetscObject)A,MATIS,&ismatis));
291     if (isbddc && !ismatis) {
292       Mat J;
293 
294       PetscCall(MatConvert(A,MATIS,MAT_INITIAL_MATRIX,&J));
295       PetscCall(KSPSetOperators(ksp,A,J));
296       PetscCall(MatDestroy(&J));
297     }
298 
299     /*
300      Here we explicitly call KSPSetUp() and KSPSetUpOnBlocks() to
301      enable more precise profiling of setting up the preconditioner.
302      These calls are optional, since both will be called within
303      KSPSolve() if they haven't been called already.
304     */
305     PetscCall(KSPSetUp(ksp));
306     PetscCall(KSPSetUpOnBlocks(ksp));
307 
308     /* - - - - - - - - - - - New Stage - - - - - - - - - - - - -
309                          Solve system
310       - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
311 
312     /*
313      Solve linear system;
314     */
315     if (trans) {
316       PetscCall(KSPSolveTranspose(ksp,b,x));
317       PetscCall(KSPGetIterationNumber(ksp,&its));
318     } else {
319       PetscInt num_rhs=1;
320       PetscCall(PetscOptionsGetInt(NULL,NULL,"-num_rhs",&num_rhs,NULL));
321       cknorm = PETSC_FALSE;
322       PetscCall(PetscOptionsGetBool(NULL,NULL,"-cknorm",&cknorm,NULL));
323       while (num_rhs--) {
324         if (num_rhs == 1) VecSet(x,0.0);
325         PetscCall(KSPSolve(ksp,b,x));
326       }
327       PetscCall(KSPGetIterationNumber(ksp,&its));
328       if (cknorm) {     /* Check error for each rhs */
329         if (trans) {
330           PetscCall(MatMultTranspose(A,x,u));
331         } else {
332           PetscCall(MatMult(A,x,u));
333         }
334         PetscCall(VecAXPY(u,-1.0,b));
335         PetscCall(VecNorm(u,NORM_2,&norm));
336         PetscCall(PetscPrintf(PETSC_COMM_WORLD,"  Number of iterations = %3" PetscInt_FMT "\n",its));
337         if (!PetscIsNanScalar(norm)) {
338           if (norm < 1.e-12) {
339             PetscCall(PetscPrintf(PETSC_COMM_WORLD,"  Residual norm < 1.e-12\n"));
340           } else {
341             PetscCall(PetscPrintf(PETSC_COMM_WORLD,"  Residual norm %g\n",(double)norm));
342           }
343         }
344       }
345     }   /* while (num_rhs--) */
346 
347     /* - - - - - - - - - - - New Stage - - - - - - - - - - - - -
348           Check error, print output, free data structures.
349      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
350 
351     /*
352        Check error
353     */
354     if (trans) {
355       PetscCall(MatMultTranspose(A,x,u));
356     } else {
357       PetscCall(MatMult(A,x,u));
358     }
359     PetscCall(VecAXPY(u,-1.0,b));
360     PetscCall(VecNorm(u,NORM_2,&norm));
361     /*
362      Write output (optinally using table for solver details).
363       - PetscPrintf() handles output for multiprocessor jobs
364         by printing from only one processor in the communicator.
365       - KSPView() prints information about the linear solver.
366     */
367     if (table) {
368       char        *matrixname,kspinfo[120];
369 
370       /*
371        Open a string viewer; then write info to it.
372       */
373       PetscCall(PetscViewerStringOpen(PETSC_COMM_WORLD,kspinfo,sizeof(kspinfo),&viewer));
374       PetscCall(KSPView(ksp,viewer));
375       PetscCall(PetscStrrchr(file[PetscPreLoadIt],'/',&matrixname));
376       PetscCall(PetscPrintf(PETSC_COMM_WORLD,"%-8.8s %3" PetscInt_FMT " %2.0e %s \n",matrixname,its,(double)norm,kspinfo));
377 
378       /*
379         Destroy the viewer
380       */
381       PetscCall(PetscViewerDestroy(&viewer));
382     } else {
383       PetscCall(PetscPrintf(PETSC_COMM_WORLD,"Number of iterations = %3" PetscInt_FMT "\n",its));
384       if (!PetscIsNanScalar(norm)) {
385         if (norm < 1.e-12 && !PetscIsNanScalar((PetscScalar)norm)) {
386           PetscCall(PetscPrintf(PETSC_COMM_WORLD,"  Residual norm < 1.e-12\n"));
387         } else {
388           PetscCall(PetscPrintf(PETSC_COMM_WORLD,"Residual norm %g\n",(double)norm));
389         }
390       }
391     }
392     PetscCall(PetscOptionsGetString(NULL,NULL,"-solution",file[3],sizeof(file[3]),&flg));
393     if (flg) {
394       Vec         xstar;
395       PetscReal   norm;
396 
397       PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD,file[3],FILE_MODE_READ,&viewer));
398       PetscCall(VecCreate(PETSC_COMM_WORLD,&xstar));
399       PetscCall(VecLoad(xstar,viewer));
400       PetscCall(VecAXPY(xstar, -1.0, x));
401       PetscCall(VecNorm(xstar, NORM_2, &norm));
402       PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Error norm %g\n", (double)norm));
403       PetscCall(VecDestroy(&xstar));
404       PetscCall(PetscViewerDestroy(&viewer));
405     }
406     if (outputSoln) {
407       PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD,"solution.petsc",FILE_MODE_WRITE,&viewer));
408       PetscCall(VecView(x, viewer));
409       PetscCall(PetscViewerDestroy(&viewer));
410     }
411 
412     flg  = PETSC_FALSE;
413     PetscCall(PetscOptionsGetBool(NULL,NULL, "-ksp_reason", &flg,NULL));
414     if (flg) {
415       KSPConvergedReason reason;
416       PetscCall(KSPGetConvergedReason(ksp,&reason));
417       PetscCall(PetscPrintf(PETSC_COMM_WORLD,"KSPConvergedReason: %s\n", KSPConvergedReasons[reason]));
418     }
419 
420   }   /* while (num_numfac--) */
421 
422   /*
423      Free work space.  All PETSc objects should be destroyed when they
424      are no longer needed.
425   */
426   PetscCall(MatDestroy(&A)); PetscCall(VecDestroy(&b));
427   PetscCall(VecDestroy(&u)); PetscCall(VecDestroy(&x));
428   PetscCall(KSPDestroy(&ksp));
429   PetscPreLoadEnd();
430   /* -----------------------------------------------------------
431                       End of linear solver loop
432      ----------------------------------------------------------- */
433 
434   PetscCall(PetscFinalize());
435   return 0;
436 }
437 
438 /*TEST
439 
440    build:
441       requires: !complex
442 
443    testset:
444       suffix: 1
445       nsize: 2
446       args: -f0 ${wPETSC_DIR}/share/petsc/datafiles/matrices/spd-real-int@PETSC_INDEX_SIZE@-float@PETSC_SCALAR_SIZE@
447       requires: !__float128
448 
449    testset:
450       suffix: 1a
451       args: -f0 ${wPETSC_DIR}/share/petsc/datafiles/matrices/spd-real-int@PETSC_INDEX_SIZE@-float@PETSC_SCALAR_SIZE@
452       requires: !__float128
453 
454    testset:
455       nsize: 2
456       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
457       args: -f0 ${DATAFILESPATH}/matrices/medium
458       args:  -ksp_type bicg
459       test:
460          suffix: 2
461 
462    testset:
463       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
464       args: -f0 ${DATAFILESPATH}/matrices/medium
465       args: -ksp_type bicg
466       test:
467          suffix: 4
468          args: -pc_type lu
469       test:
470          suffix: 5
471 
472    testset:
473       suffix: 6
474       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
475       args: -f0 ${DATAFILESPATH}/matrices/fem1
476       args: -pc_factor_levels 2 -pc_factor_fill 1.73 -ksp_gmres_cgs_refinement_type refine_always
477 
478    testset:
479       TODO: Matrix row/column sizes are not compatible with block size
480       suffix: 7
481       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
482       args: -f0 ${DATAFILESPATH}/matrices/medium
483       args: -viewer_binary_skip_info -mat_type seqbaij
484       args: -matload_block_size {{2 3 4 5 6 7 8}separate output}
485       args: -ksp_max_it 100 -ksp_gmres_cgs_refinement_type refine_always
486       args: -ksp_rtol 1.0e-15 -ksp_monitor_short
487       test:
488          suffix: a
489       test:
490          suffix: b
491          args: -pc_factor_mat_ordering_type nd
492       test:
493          suffix: c
494          args: -pc_factor_levels 1
495       test:
496          requires: metis
497          suffix: d
498          args: -pc_factor_mat_ordering_type metisnd
499 
500    testset:
501       TODO: Matrix row/column sizes are not compatible with block size
502       suffix: 7_d
503       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
504       args: -f0 ${DATAFILESPATH}/matrices/medium
505       args: -viewer_binary_skip_info -mat_type seqbaij
506       args: -matload_block_size {{2 3 4 5 6 7 8}shared output}
507       args: -ksp_type preonly -pc_type lu
508 
509    testset:
510       suffix: 8
511       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
512       args: -f0 ${DATAFILESPATH}/matrices/medium
513       args: -ksp_diagonal_scale -pc_type eisenstat -ksp_monitor_short -ksp_diagonal_scale_fix -ksp_gmres_cgs_refinement_type refine_always -mat_no_inode
514 
515    testset:
516       TODO: Matrix row/column sizes are not compatible with block size
517       suffix: 9
518       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
519       args: -f0 ${DATAFILESPATH}/matrices/medium
520       args: -viewer_binary_skip_info  -matload_block_size {{1 2 3 4 5 6 7}separate output} -ksp_max_it 100 -ksp_gmres_cgs_refinement_type refine_always -ksp_rtol 1.0e-15 -ksp_monitor_short
521       test:
522          suffix: a
523          args: -mat_type seqbaij
524       test:
525          suffix: b
526          args: -mat_type seqbaij -trans
527       test:
528          suffix: c
529          nsize: 2
530          args: -mat_type mpibaij
531       test:
532          suffix: d
533          nsize: 2
534          args: -mat_type mpibaij -trans
535       test:
536          suffix: e
537          nsize: 3
538          args: -mat_type mpibaij
539       test:
540          suffix: f
541          nsize: 3
542          args: -mat_type mpibaij -trans
543 
544    testset:
545       suffix: 10
546       nsize: 2
547       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
548       args: -ksp_type fgmres -pc_type ksp -f0 ${DATAFILESPATH}/matrices/medium -ksp_fgmres_modifypcksp -ksp_monitor_short
549 
550    testset:
551       suffix: 12
552       requires: matlab
553       args: -pc_type lu -pc_factor_mat_solver_type matlab -f0 ${DATAFILESPATH}/matrices/arco1
554 
555    testset:
556       suffix: 13
557       requires: lusol
558       args: -f0 ${DATAFILESPATH}/matrices/arco1
559       args: -mat_type lusol -pc_type lu
560 
561    testset:
562       nsize: 3
563       args: -f0 ${DATAFILESPATH}/matrices/medium
564       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
565       test:
566          suffix: 14
567          requires: spai
568          args: -pc_type spai
569       test:
570          suffix: 15
571          requires: hypre !defined(PETSC_HAVE_HYPRE_DEVICE)
572          args: -pc_type hypre -pc_hypre_type pilut
573       test:
574          suffix: 16
575          requires: hypre !defined(PETSC_HAVE_HYPRE_DEVICE)
576          args: -pc_type hypre -pc_hypre_type parasails
577       test:
578          suffix: 17
579          requires: hypre !defined(PETSC_HAVE_HYPRE_DEVICE)
580          args: -pc_type hypre -pc_hypre_type boomeramg
581       test:
582          suffix: 18
583          requires: hypre !defined(PETSC_HAVE_HYPRE_DEVICE)
584          args: -pc_type hypre -pc_hypre_type euclid
585 
586    testset:
587       suffix: 19
588       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
589       args: -f0 ${DATAFILESPATH}/matrices/poisson1
590       args: -ksp_type cg -pc_type icc
591       args: -pc_factor_levels {{0 2 4}separate output}
592       test:
593       test:
594          args: -mat_type seqsbaij
595 
596    testset:
597       suffix: ILU
598       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
599       args: -f0 ${DATAFILESPATH}/matrices/small
600       args: -pc_factor_levels 1
601       test:
602       test:
603          # This is tested against regular ILU (used to be denoted ILUBAIJ)
604          args: -mat_type baij
605 
606    testset:
607       suffix: aijcusparse
608       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) cuda
609       args: -f0 ${DATAFILESPATH}/matrices/medium -ksp_monitor_short -ksp_view -mat_view ascii::ascii_info -mat_type aijcusparse -pc_factor_mat_solver_type cusparse -pc_type ilu -vec_type cuda
610 
611    testset:
612       TODO: No output file. Need to determine if deprecated
613       suffix: asm_viennacl
614       nsize: 2
615       requires: viennacl
616       args: -pc_type asm -pc_asm_sub_mat_type aijviennacl -f0 ${wPETSC_DIR}/share/petsc/datafiles/matrices/spd-real-int${PETSC_INDEX_SIZE}-float${PETSC_SCALAR_SIZE}
617 
618    testset:
619       nsize: 2
620       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) hypre !defined(PETSC_HAVE_HYPRE_DEVICE)
621       args: -f0 ${DATAFILESPATH}/matrices/poisson2.gz -ksp_monitor_short -ksp_rtol 1.E-9 -pc_type hypre -pc_hypre_type boomeramg
622       test:
623          suffix: boomeramg_euclid
624          args: -pc_hypre_boomeramg_smooth_type Euclid -pc_hypre_boomeramg_smooth_num_levels 2 -pc_hypre_boomeramg_eu_level 1 -pc_hypre_boomeramg_eu_droptolerance 0.01
625          TODO: Need to determine if deprecated
626       test:
627          suffix: boomeramg_euclid_bj
628          args: -pc_hypre_boomeramg_smooth_type Euclid -pc_hypre_boomeramg_smooth_num_levels 2 -pc_hypre_boomeramg_eu_level 1 -pc_hypre_boomeramg_eu_droptolerance 0.01 -pc_hypre_boomeramg_eu_bj
629          TODO: Need to determine if deprecated
630       test:
631          suffix: boomeramg_parasails
632          args: -pc_hypre_boomeramg_smooth_type ParaSails -pc_hypre_boomeramg_smooth_num_levels 2
633       test:
634          suffix: boomeramg_pilut
635          args: -pc_hypre_boomeramg_smooth_type Pilut -pc_hypre_boomeramg_smooth_num_levels 2
636       test:
637          suffix: boomeramg_schwarz
638          args: -pc_hypre_boomeramg_smooth_type Schwarz-smoothers
639 
640    testset:
641       suffix: cg_singlereduction
642       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
643       args: -f0 ${DATAFILESPATH}/matrices/small
644       args: -mat_type mpisbaij -ksp_type cg -pc_type eisenstat -ksp_monitor_short -ksp_converged_reason
645       test:
646       test:
647          args: -ksp_cg_single_reduction
648 
649    testset:
650       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
651       args: -f0 ${DATAFILESPATH}/matrices/poisson2.gz
652       args: -ksp_monitor_short -pc_type icc
653       test:
654          suffix: cr
655          args: -ksp_type cr
656       test:
657          suffix: lcd
658          args: -ksp_type lcd
659 
660    testset:
661       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
662       args: -f0 ${DATAFILESPATH}/matrices/small
663       args: -ksp_monitor_short -ksp_view -mat_view ascii::ascii_info
664       test:
665          suffix: seqaijcrl
666          args: -mat_type seqaijcrl
667       test:
668          suffix: seqaijperm
669          args: -mat_type seqaijperm
670 
671    testset:
672       nsize: 2
673       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
674       args: -f0 ${DATAFILESPATH}/matrices/small
675       args: -ksp_monitor_short -ksp_view
676       # Different output files
677       test:
678          suffix: mpiaijcrl
679          args: -mat_type mpiaijcrl
680       test:
681          suffix: mpiaijperm
682          args: -mat_type mpiaijperm
683 
684    testset:
685       nsize: 4
686       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) !defined(PETSC_HAVE_I_MPI_NUMVERSION)
687       args: -ksp_monitor_short -ksp_view
688       test:
689          suffix: xxt
690          args: -f0 ${DATAFILESPATH}/matrices/poisson1 -check_symmetry -ksp_type cg -pc_type tfs
691       test:
692          suffix: xyt
693          args: -f0 ${DATAFILESPATH}/matrices/medium -ksp_type gmres -pc_type tfs
694 
695    testset:
696       # The output file here is the same as mumps
697       suffix: mumps_cholesky
698       output_file: output/ex72_mumps.out
699       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) mumps
700       args: -f0 ${DATAFILESPATH}/matrices/small -ksp_type preonly -pc_type cholesky -pc_factor_mat_solver_type mumps -num_numfac 2 -num_rhs 2
701       nsize: {{1 2}}
702       test:
703          args: -mat_type sbaij -mat_ignore_lower_triangular
704       test:
705          args: -mat_type aij
706       test:
707          args: -mat_type aij -matload_spd
708 
709    testset:
710       # The output file here is the same as mumps
711       suffix: mumps_lu
712       output_file: output/ex72_mumps.out
713       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) mumps
714       args: -f0 ${DATAFILESPATH}/matrices/small -ksp_type preonly -pc_type lu -pc_factor_mat_solver_type mumps -num_numfac 2 -num_rhs 2
715       test:
716          args: -mat_type seqaij
717       test:
718          nsize: 2
719          args: -mat_type mpiaij
720       test:
721          args: -mat_type seqbaij -matload_block_size 2
722       test:
723          nsize: 2
724          args: -mat_type mpibaij -matload_block_size 2
725       test:
726          args: -mat_type aij -mat_mumps_icntl_7 5
727          TODO: Need to determine if deprecated
728 
729    test:
730       suffix: mumps_lu_parmetis
731       output_file: output/ex72_mumps.out
732       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) mumps parmetis
733       nsize: 2
734       args: -f0 ${DATAFILESPATH}/matrices/small -ksp_type preonly -pc_type lu -pc_factor_mat_solver_type mumps -num_numfac 2 -num_rhs 2 -mat_type mpiaij -mat_mumps_icntl_28 2 -mat_mumps_icntl_29 2
735 
736    test:
737       suffix: mumps_lu_ptscotch
738       output_file: output/ex72_mumps.out
739       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) mumps ptscotch
740       nsize: 2
741       args: -f0 ${DATAFILESPATH}/matrices/small -ksp_type preonly -pc_type lu -pc_factor_mat_solver_type mumps -num_numfac 2 -num_rhs 2 -mat_type mpiaij -mat_mumps_icntl_28 2 -mat_mumps_icntl_29 1
742 
743    testset:
744       # The output file here is the same as mumps
745       suffix: mumps_redundant
746       output_file: output/ex72_mumps_redundant.out
747       nsize: 8
748       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) mumps
749       args: -f0 ${DATAFILESPATH}/matrices/medium -ksp_type preonly -pc_type redundant -pc_redundant_number {{8 7 6 5 4 3 2 1}} -redundant_pc_factor_mat_solver_type mumps -num_numfac 2 -num_rhs 2
750 
751    testset:
752       suffix: pastix_cholesky
753       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) pastix
754       output_file: output/ex72_mumps.out
755       nsize: {{1 2}}
756       args: -f0 ${DATAFILESPATH}/matrices/small -ksp_type preonly -pc_factor_mat_solver_type pastix -num_numfac 2 -num_rhs 2 -pc_type cholesky -mat_type sbaij -mat_ignore_lower_triangular
757 
758    testset:
759       suffix: pastix_lu
760       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) pastix
761       args: -f0 ${DATAFILESPATH}/matrices/small -ksp_type preonly -pc_type lu -pc_factor_mat_solver_type pastix -num_numfac 2 -num_rhs 2
762       output_file: output/ex72_mumps.out
763       test:
764          args: -mat_type seqaij
765       test:
766          nsize: 2
767          args: -mat_type mpiaij
768 
769    testset:
770       suffix: pastix_redundant
771       output_file: output/ex72_mumps_redundant.out
772       nsize: 8
773       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) pastix
774       args: -f0 ${DATAFILESPATH}/matrices/medium -ksp_type preonly -pc_type redundant -pc_redundant_number {{8 7 6 5 4 3 2 1}} -redundant_pc_factor_mat_solver_type pastix -num_numfac 2 -num_rhs 2
775 
776    testset:
777       suffix: superlu_dist_lu
778       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) superlu_dist
779       output_file: output/ex72_mumps.out
780       args: -f0 ${DATAFILESPATH}/matrices/small -ksp_type preonly -pc_type lu -pc_factor_mat_solver_type superlu_dist -num_numfac 2 -num_rhs 2
781       nsize: {{1 2}}
782 
783    testset:
784       suffix: superlu_dist_redundant
785       nsize: 8
786       output_file: output/ex72_mumps_redundant.out
787       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) superlu_dist
788       args: -f0 ${DATAFILESPATH}/matrices/medium -ksp_type preonly -pc_type redundant -pc_redundant_number {{8 7 6 5 4 3 2 1}} -redundant_pc_factor_mat_solver_type superlu_dist -num_numfac 2 -num_rhs 2
789 
790    testset:
791       suffix: superlu_lu
792       output_file: output/ex72_mumps.out
793       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) superlu
794       args: -f0 ${DATAFILESPATH}/matrices/small -ksp_type preonly -pc_type lu -pc_factor_mat_solver_type superlu -num_numfac 2 -num_rhs 2
795 
796    testset:
797       suffix: umfpack
798       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) suitesparse
799       args: -f0 ${DATAFILESPATH}/matrices/small -ksp_type preonly -pc_type lu -mat_type seqaij -pc_factor_mat_solver_type umfpack -num_numfac 2 -num_rhs 2
800 
801    testset:
802       suffix: zeropivot
803       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) mumps
804       args: -f0 ${DATAFILESPATH}/matrices/small -test_zeropivot -ksp_converged_reason -ksp_type fgmres -pc_type ksp
805       test:
806          nsize: 3
807          args: -ksp_pc_type bjacobi
808       test:
809          nsize: 2
810          args: -ksp_ksp_type cg -ksp_pc_type bjacobi -ksp_pc_bjacobi_blocks 1
811       #test:
812          #nsize: 3
813          #args: -ksp_ksp_converged_reason -ksp_pc_type bjacobi -ksp_sub_ksp_converged_reason
814          #TODO: Need to determine if deprecated
815 
816    testset:
817       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
818       args: -f0 ${DATAFILESPATH}/matrices/medium -ksp_type fgmres
819       test:
820          suffix: bddc_seq
821          nsize: 1
822          args: -pc_type bddc
823       test:
824          suffix: bddc_par
825          nsize: 2
826          args: -pc_type bddc
827       test:
828          requires: parmetis
829          suffix: bddc_par_nd_parmetis
830          filter: sed -e "s/Number of iterations =   [0-9]/Number of iterations = 9/g"
831          nsize: 4
832          args: -ksp_error_if_not_converged -pc_type bddc -mat_is_disassemble_l2g_type nd -mat_partitioning_type parmetis
833       test:
834          requires: ptscotch defined(PETSC_HAVE_SCOTCH_PARMETIS_V3_NODEND)
835          suffix: bddc_par_nd_ptscotch
836          filter: sed -e "s/Number of iterations =   [0-9]/Number of iterations = 9/g"
837          nsize: 4
838          args: -ksp_error_if_not_converged -pc_type bddc -mat_is_disassemble_l2g_type nd -mat_partitioning_type ptscotch
839 
840    testset:
841       requires: !__float128 hpddm slepc defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
842       test:
843          suffix: hpddm_mat
844          output_file: output/ex72_bddc_seq.out
845          filter: sed -e "s/Number of iterations =   2/Number of iterations =   1/g"
846          nsize: 2
847          args: -f0 ${wPETSC_DIR}/share/petsc/datafiles/matrices/spd-real-int@PETSC_INDEX_SIZE@-float@PETSC_SCALAR_SIZE@ -pc_type hpddm -pc_hpddm_define_subdomains -pc_hpddm_levels_1_sub_pc_type cholesky -pc_hpddm_levels_1_eps_nev 5 -pc_hpddm_levels_1_st_pc_type mat
848       test:
849          requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
850          suffix: hpddm_gen_non_hermitian
851          output_file: output/ex72_2.out
852          nsize: 4
853          args: -f0 ${DATAFILESPATH}/matrices/arco1 -pc_type hpddm -pc_hpddm_define_subdomains -pc_hpddm_levels_1_sub_pc_type lu -pc_hpddm_levels_1_eps_nev 10 -pc_hpddm_levels_1_st_share_sub_ksp -pc_hpddm_levels_1_eps_gen_non_hermitian -pc_hpddm_coarse_mat_type baij -pc_hpddm_block_splitting -pc_hpddm_levels_1_eps_threshold 0.7 -pc_hpddm_coarse_pc_type lu -ksp_pc_side right
854       test:
855          requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) mumps !defined(PETSCTEST_VALGRIND)
856          suffix: hpddm_gen_non_hermitian_baij
857          output_file: output/ex72_5.out
858          nsize: 4
859          timeoutfactor: 2
860          args: -f0 ${DATAFILESPATH}/matrices/arco6 -pc_type hpddm -pc_hpddm_define_subdomains -pc_hpddm_levels_1_sub_pc_type lu -pc_hpddm_levels_1_eps_nev 30 -pc_hpddm_levels_1_st_share_sub_ksp -pc_hpddm_levels_1_eps_gen_non_hermitian -pc_hpddm_coarse_mat_type baij -pc_hpddm_block_splitting -pc_hpddm_levels_1_eps_threshold 0.8 -pc_hpddm_coarse_pc_type lu -ksp_pc_side right -mat_type baij -pc_hpddm_levels_1_sub_pc_factor_mat_solver_type mumps -pc_hpddm_levels_1_eps_tol 1.0e-2
861 TEST*/
862