xref: /petsc/src/ksp/ksp/tutorials/ex72.c (revision 2e956fe4fc852fabc23b437482e1fb7b77fddb0d)
1 
/* Usage text shown by -help; also handed to PetscInitialize() in main(). */
static char help[] = "Reads a PETSc matrix and vector from a file and solves a linear system.\n\
This version first preloads and solves a small system, then loads \n\
another (larger) system and solves it as well.  This example illustrates\n\
preloading of instructions with the smaller system so that more accurate\n\
performance monitoring can be done with the larger one (that actually\n\
is the system of interest).  See the 'Performance Hints' chapter of the\n\
users manual for a discussion of preloading.  Input parameters include\n\
  -f0 <input_file> : first file to load (small system)\n\
  -f1 <input_file> : second file to load (larger system)\n\n\
  -nearnulldim <0> : number of vectors in the near-null space immediately following matrix\n\n\
  -trans  : solve transpose system instead\n\n";
/*
  This code can be used to test PETSc interfaces to other packages.
  Examples of command line options:
   ./ex72 -f0 <datafile> -ksp_type preonly
        -help -ksp_view
        -num_numfac <num_numfac> -num_rhs <num_rhs>
        -ksp_type preonly -pc_type lu -pc_factor_mat_solver_type superlu or superlu_dist or mumps
        -ksp_type preonly -pc_type cholesky -pc_factor_mat_solver_type mumps
   mpiexec -n <np> ./ex72 -f0 <datafile> -ksp_type cg -pc_type asm -pc_asm_type basic -sub_pc_type icc -mat_type sbaij
*/
24 
25 /*
26   Include "petscksp.h" so that we can use KSP solvers.  Note that this file
27   automatically includes:
28      petscsys.h       - base PETSc routines   petscvec.h - vectors
29      petscmat.h - matrices
30      petscis.h     - index sets            petscksp.h - Krylov subspace methods
31      petscviewer.h - viewers               petscpc.h  - preconditioners
32 */
33 #include <petscksp.h>
34 
35 int main(int argc,char **args)
36 {
37   KSP            ksp;             /* linear solver context */
38   Mat            A;               /* matrix */
39   Vec            x,b,u;           /* approx solution, RHS, exact solution */
40   PetscViewer    viewer;          /* viewer */
41   char           file[4][PETSC_MAX_PATH_LEN];     /* input file name */
42   PetscBool      table     =PETSC_FALSE,flg,trans=PETSC_FALSE,initialguess = PETSC_FALSE;
43   PetscBool      outputSoln=PETSC_FALSE,constantnullspace = PETSC_FALSE;
44   PetscInt       its,num_numfac,m,n,M,p,nearnulldim = 0;
45   PetscReal      norm;
46   PetscBool      preload=PETSC_TRUE,isSymmetric,cknorm=PETSC_FALSE,initialguessfile = PETSC_FALSE;
47   PetscMPIInt    rank;
48   char           initialguessfilename[PETSC_MAX_PATH_LEN];
49   char           mtype[PETSC_MAX_PATH_LEN];
50 
51   PetscCall(PetscInitialize(&argc,&args,(char*)0,help));
52   PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD,&rank));
53   PetscCall(PetscOptionsGetBool(NULL,NULL,"-table",&table,NULL));
54   PetscCall(PetscOptionsGetBool(NULL,NULL,"-constantnullspace",&constantnullspace,NULL));
55   PetscCall(PetscOptionsGetBool(NULL,NULL,"-trans",&trans,NULL));
56   PetscCall(PetscOptionsGetBool(NULL,NULL,"-initialguess",&initialguess,NULL));
57   PetscCall(PetscOptionsGetBool(NULL,NULL,"-output_solution",&outputSoln,NULL));
58   PetscCall(PetscOptionsGetString(NULL,NULL,"-initialguessfilename",initialguessfilename,sizeof(initialguessfilename),&initialguessfile));
59   PetscCall(PetscOptionsGetInt(NULL,NULL,"-nearnulldim",&nearnulldim,NULL));
60 
61   /*
62      Determine files from which we read the two linear systems
63      (matrix and right-hand-side vector).
64   */
65   PetscCall(PetscOptionsGetString(NULL,NULL,"-f",file[0],sizeof(file[0]),&flg));
66   if (flg) {
67     PetscCall(PetscStrcpy(file[1],file[0]));
68     preload = PETSC_FALSE;
69   } else {
70     PetscCall(PetscOptionsGetString(NULL,NULL,"-f0",file[0],sizeof(file[0]),&flg));
71     PetscCheck(flg,PETSC_COMM_WORLD,PETSC_ERR_USER_INPUT,"Must indicate binary file with the -f0 or -f option");
72     PetscCall(PetscOptionsGetString(NULL,NULL,"-f1",file[1],sizeof(file[1]),&flg));
73     if (!flg) preload = PETSC_FALSE;   /* don't bother with second system */
74   }
75 
76   /* -----------------------------------------------------------
77                   Beginning of linear solver loop
78      ----------------------------------------------------------- */
79   /*
80      Loop through the linear solve 2 times.
81       - The intention here is to preload and solve a small system;
82         then load another (larger) system and solve it as well.
83         This process preloads the instructions with the smaller
84         system so that more accurate performance monitoring (via
85         -log_view) can be done with the larger one (that actually
86         is the system of interest).
87   */
88   PetscPreLoadBegin(preload,"Load system");
89 
90   /* - - - - - - - - - - - New Stage - - - - - - - - - - - - -
91                          Load system
92    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
93 
94   /*
95      Open binary file.  Note that we use FILE_MODE_READ to indicate
96      reading from this file.
97   */
98   PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD,file[PetscPreLoadIt],FILE_MODE_READ,&viewer));
99 
100   /*
101      Load the matrix and vector; then destroy the viewer.
102   */
103   PetscCall(MatCreate(PETSC_COMM_WORLD,&A));
104   PetscCall(MatSetFromOptions(A));
105   PetscCall(MatLoad(A,viewer));
106 
107   PetscCall(PetscOptionsGetString(NULL,NULL,"-mat_convert_type",mtype,sizeof(mtype),&flg));
108   if (flg) PetscCall(MatConvert(A,mtype,MAT_INPLACE_MATRIX,&A));
109 
110   if (nearnulldim) {
111     MatNullSpace nullsp;
112     Vec          *nullvecs;
113     PetscInt     i;
114     PetscCall(PetscMalloc1(nearnulldim,&nullvecs));
115     for (i=0; i<nearnulldim; i++) {
116       PetscCall(VecCreate(PETSC_COMM_WORLD,&nullvecs[i]));
117       PetscCall(VecLoad(nullvecs[i],viewer));
118     }
119     PetscCall(MatNullSpaceCreate(PETSC_COMM_WORLD,PETSC_FALSE,nearnulldim,nullvecs,&nullsp));
120     PetscCall(MatSetNearNullSpace(A,nullsp));
121     for (i=0; i<nearnulldim; i++) PetscCall(VecDestroy(&nullvecs[i]));
122     PetscCall(PetscFree(nullvecs));
123     PetscCall(MatNullSpaceDestroy(&nullsp));
124   }
125   if (constantnullspace) {
126     MatNullSpace constant;
127     PetscCall(MatNullSpaceCreate(PETSC_COMM_WORLD,PETSC_TRUE,0,NULL,&constant));
128     PetscCall(MatSetNullSpace(A,constant));
129     PetscCall(MatNullSpaceDestroy(&constant));
130   }
131   flg  = PETSC_FALSE;
132   PetscCall(PetscOptionsGetString(NULL,NULL,"-rhs",file[2],sizeof(file[2]),&flg));
133   PetscCall(VecCreate(PETSC_COMM_WORLD,&b));
134   if (flg) {   /* rhs is stored in a separate file */
135     if (file[2][0] == '0' || file[2][0] == 0) {
136       PetscInt    m;
137       PetscScalar one = 1.0;
138       PetscCall(PetscInfo(0,"Using vector of ones for RHS\n"));
139       PetscCall(MatGetLocalSize(A,&m,NULL));
140       PetscCall(VecSetSizes(b,m,PETSC_DECIDE));
141       PetscCall(VecSetFromOptions(b));
142       PetscCall(VecSet(b,one));
143     } else {
144       PetscCall(PetscViewerDestroy(&viewer));
145       PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD,file[2],FILE_MODE_READ,&viewer));
146       PetscCall(VecSetFromOptions(b));
147       PetscCall(VecLoad(b,viewer));
148     }
149   } else {   /* rhs is stored in the same file as matrix */
150     PetscCall(VecSetFromOptions(b));
151     PetscCall(VecLoad(b,viewer));
152   }
153   PetscCall(PetscViewerDestroy(&viewer));
154 
155   /* Make A singular for testing zero-pivot of ilu factorization */
156   /* Example: ./ex72 -f0 <datafile> -test_zeropivot -pc_factor_shift_type <shift_type> */
157   flg  = PETSC_FALSE;
158   PetscCall(PetscOptionsGetBool(NULL,NULL, "-test_zeropivot", &flg,NULL));
159   if (flg) { /* set a row as zeros */
160     PetscInt          row=0;
161     PetscCall(MatSetOption(A,MAT_KEEP_NONZERO_PATTERN,PETSC_TRUE));
162     PetscCall(MatZeroRows(A,1,&row,0.0,NULL,NULL));
163   }
164 
165   /* Check whether A is symmetric, then set A->symmetric option */
166   flg = PETSC_FALSE;
167   PetscCall(PetscOptionsGetBool(NULL,NULL, "-check_symmetry", &flg,NULL));
168   if (flg) {
169     PetscCall(MatIsSymmetric(A,0.0,&isSymmetric));
170     if (!isSymmetric) {
171       PetscCall(PetscPrintf(PETSC_COMM_WORLD,"Warning: A is non-symmetric \n"));
172     }
173   }
174 
175   /*
176      If the loaded matrix is larger than the vector (due to being padded
177      to match the block size of the system), then create a new padded vector.
178   */
179 
180   PetscCall(MatGetLocalSize(A,NULL,&n));
181   PetscCall(MatGetSize(A,&M,NULL));
182   PetscCall(VecGetSize(b,&m));
183   PetscCall(VecGetLocalSize(b,&p));
184   preload = (PetscBool)(M != m || p != n); /* Global or local dimension mismatch */
185   PetscCall(MPIU_Allreduce(&preload,&flg,1,MPIU_BOOL,MPI_LOR,PetscObjectComm((PetscObject)A)));
186   if (flg) { /* Create a new vector b by padding the old one */
187     PetscInt    j,mvec,start,end,indx;
188     Vec         tmp;
189     PetscScalar *bold;
190 
191     PetscCall(VecCreate(PETSC_COMM_WORLD,&tmp));
192     PetscCall(VecSetSizes(tmp,n,PETSC_DECIDE));
193     PetscCall(VecSetFromOptions(tmp));
194     PetscCall(VecGetOwnershipRange(b,&start,&end));
195     PetscCall(VecGetLocalSize(b,&mvec));
196     PetscCall(VecGetArray(b,&bold));
197     for (j=0; j<mvec; j++) {
198       indx = start+j;
199       PetscCall(VecSetValues(tmp,1,&indx,bold+j,INSERT_VALUES));
200     }
201     PetscCall(VecRestoreArray(b,&bold));
202     PetscCall(VecDestroy(&b));
203     PetscCall(VecAssemblyBegin(tmp));
204     PetscCall(VecAssemblyEnd(tmp));
205     b    = tmp;
206   }
207 
208   PetscCall(MatCreateVecs(A,&x,NULL));
209   PetscCall(VecDuplicate(b,&u));
210   if (initialguessfile) {
211     PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD,initialguessfilename,FILE_MODE_READ,&viewer));
212     PetscCall(VecLoad(x,viewer));
213     PetscCall(PetscViewerDestroy(&viewer));
214     initialguess = PETSC_TRUE;
215   } else if (initialguess) {
216     PetscCall(VecSet(x,1.0));
217   } else {
218     PetscCall(VecSet(x,0.0));
219   }
220 
221   /* Check scaling in A */
222   flg  = PETSC_FALSE;
223   PetscCall(PetscOptionsGetBool(NULL,NULL, "-check_scaling", &flg,NULL));
224   if (flg) {
225     Vec       max, min;
226     PetscInt  idx;
227     PetscReal val;
228 
229     PetscCall(VecDuplicate(x, &max));
230     PetscCall(VecDuplicate(x, &min));
231     PetscCall(MatGetRowMaxAbs(A, max, NULL));
232     PetscCall(MatGetRowMinAbs(A, min, NULL));
233     {
234       PetscCall(PetscViewerASCIIOpen(PETSC_COMM_WORLD, "max.data", &viewer));
235       PetscCall(VecView(max, viewer));
236       PetscCall(PetscViewerDestroy(&viewer));
237       PetscCall(PetscViewerASCIIOpen(PETSC_COMM_WORLD, "min.data", &viewer));
238       PetscCall(VecView(min, viewer));
239       PetscCall(PetscViewerDestroy(&viewer));
240     }
241     PetscCall(VecView(max, PETSC_VIEWER_DRAW_WORLD));
242     PetscCall(VecMax(max, &idx, &val));
243     PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Largest max row element %g at row %" PetscInt_FMT "\n", (double)val, idx));
244     PetscCall(VecView(min, PETSC_VIEWER_DRAW_WORLD));
245     PetscCall(VecMin(min, &idx, &val));
246     PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Smallest min row element %g at row %" PetscInt_FMT "\n", (double)val, idx));
247     PetscCall(VecMin(max, &idx, &val));
248     PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Smallest max row element %g at row %" PetscInt_FMT "\n", (double)val, idx));
249     PetscCall(VecPointwiseDivide(max, max, min));
250     PetscCall(VecMax(max, &idx, &val));
251     PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Largest row ratio %g at row %" PetscInt_FMT "\n", (double)val, idx));
252     PetscCall(VecView(max, PETSC_VIEWER_DRAW_WORLD));
253     PetscCall(VecDestroy(&max));
254     PetscCall(VecDestroy(&min));
255   }
256 
257   /*  PetscCall(MatView(A,PETSC_VIEWER_STDOUT_WORLD)); */
258   /* - - - - - - - - - - - New Stage - - - - - - - - - - - - -
259                     Setup solve for system
260    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
261   /*
262      Conclude profiling last stage; begin profiling next stage.
263   */
264   PetscPreLoadStage("KSPSetUpSolve");
265 
266   /*
267      Create linear solver; set operators; set runtime options.
268   */
269   PetscCall(KSPCreate(PETSC_COMM_WORLD,&ksp));
270   PetscCall(KSPSetInitialGuessNonzero(ksp,initialguess));
271   num_numfac = 1;
272   PetscCall(PetscOptionsGetInt(NULL,NULL,"-num_numfac",&num_numfac,NULL));
273   while (num_numfac--) {
274     PC        pc;
275     PetscBool lsqr,isbddc,ismatis;
276     char      str[32];
277 
278     PetscCall(PetscOptionsGetString(NULL,NULL,"-ksp_type",str,sizeof(str),&lsqr));
279     if (lsqr) {
280       PetscCall(PetscStrcmp("lsqr",str,&lsqr));
281     }
282     if (lsqr) {
283       Mat BtB;
284       PetscCall(MatTransposeMatMult(A,A,MAT_INITIAL_MATRIX,4,&BtB));
285       PetscCall(KSPSetOperators(ksp,A,BtB));
286       PetscCall(MatDestroy(&BtB));
287     } else {
288       PetscCall(KSPSetOperators(ksp,A,A));
289     }
290     PetscCall(KSPSetFromOptions(ksp));
291 
292     /* if we test BDDC, make sure pmat is of type MATIS */
293     PetscCall(KSPGetPC(ksp,&pc));
294     PetscCall(PetscObjectTypeCompare((PetscObject)pc,PCBDDC,&isbddc));
295     PetscCall(PetscObjectTypeCompare((PetscObject)A,MATIS,&ismatis));
296     if (isbddc && !ismatis) {
297       Mat J;
298 
299       PetscCall(MatConvert(A,MATIS,MAT_INITIAL_MATRIX,&J));
300       PetscCall(KSPSetOperators(ksp,A,J));
301       PetscCall(MatDestroy(&J));
302     }
303 
304     /*
305      Here we explicitly call KSPSetUp() and KSPSetUpOnBlocks() to
306      enable more precise profiling of setting up the preconditioner.
307      These calls are optional, since both will be called within
308      KSPSolve() if they haven't been called already.
309     */
310     PetscCall(KSPSetUp(ksp));
311     PetscCall(KSPSetUpOnBlocks(ksp));
312 
313     /* - - - - - - - - - - - New Stage - - - - - - - - - - - - -
314                          Solve system
315       - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
316 
317     /*
318      Solve linear system;
319     */
320     if (trans) {
321       PetscCall(KSPSolveTranspose(ksp,b,x));
322       PetscCall(KSPGetIterationNumber(ksp,&its));
323     } else {
324       PetscInt num_rhs=1;
325       PetscCall(PetscOptionsGetInt(NULL,NULL,"-num_rhs",&num_rhs,NULL));
326       cknorm = PETSC_FALSE;
327       PetscCall(PetscOptionsGetBool(NULL,NULL,"-cknorm",&cknorm,NULL));
328       while (num_rhs--) {
329         if (num_rhs == 1) VecSet(x,0.0);
330         PetscCall(KSPSolve(ksp,b,x));
331       }
332       PetscCall(KSPGetIterationNumber(ksp,&its));
333       if (cknorm) {     /* Check error for each rhs */
334         if (trans) {
335           PetscCall(MatMultTranspose(A,x,u));
336         } else {
337           PetscCall(MatMult(A,x,u));
338         }
339         PetscCall(VecAXPY(u,-1.0,b));
340         PetscCall(VecNorm(u,NORM_2,&norm));
341         PetscCall(PetscPrintf(PETSC_COMM_WORLD,"  Number of iterations = %3" PetscInt_FMT "\n",its));
342         if (!PetscIsNanScalar(norm)) {
343           if (norm < 1.e-12) {
344             PetscCall(PetscPrintf(PETSC_COMM_WORLD,"  Residual norm < 1.e-12\n"));
345           } else {
346             PetscCall(PetscPrintf(PETSC_COMM_WORLD,"  Residual norm %g\n",(double)norm));
347           }
348         }
349       }
350     }   /* while (num_rhs--) */
351 
352     /* - - - - - - - - - - - New Stage - - - - - - - - - - - - -
353           Check error, print output, free data structures.
354      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
355 
356     /*
357        Check error
358     */
359     if (trans) {
360       PetscCall(MatMultTranspose(A,x,u));
361     } else {
362       PetscCall(MatMult(A,x,u));
363     }
364     PetscCall(VecAXPY(u,-1.0,b));
365     PetscCall(VecNorm(u,NORM_2,&norm));
366     /*
367      Write output (optionally using table for solver details).
368       - PetscPrintf() handles output for multiprocessor jobs
369         by printing from only one processor in the communicator.
370       - KSPView() prints information about the linear solver.
371     */
372     if (table) {
373       char        *matrixname,kspinfo[120];
374 
375       /*
376        Open a string viewer; then write info to it.
377       */
378       PetscCall(PetscViewerStringOpen(PETSC_COMM_WORLD,kspinfo,sizeof(kspinfo),&viewer));
379       PetscCall(KSPView(ksp,viewer));
380       PetscCall(PetscStrrchr(file[PetscPreLoadIt],'/',&matrixname));
381       PetscCall(PetscPrintf(PETSC_COMM_WORLD,"%-8.8s %3" PetscInt_FMT " %2.0e %s \n",matrixname,its,(double)norm,kspinfo));
382 
383       /*
384         Destroy the viewer
385       */
386       PetscCall(PetscViewerDestroy(&viewer));
387     } else {
388       PetscCall(PetscPrintf(PETSC_COMM_WORLD,"Number of iterations = %3" PetscInt_FMT "\n",its));
389       if (!PetscIsNanScalar(norm)) {
390         if (norm < 1.e-12 && !PetscIsNanScalar((PetscScalar)norm)) {
391           PetscCall(PetscPrintf(PETSC_COMM_WORLD,"  Residual norm < 1.e-12\n"));
392         } else {
393           PetscCall(PetscPrintf(PETSC_COMM_WORLD,"Residual norm %g\n",(double)norm));
394         }
395       }
396     }
397     PetscCall(PetscOptionsGetString(NULL,NULL,"-solution",file[3],sizeof(file[3]),&flg));
398     if (flg) {
399       Vec         xstar;
400       PetscReal   norm;
401 
402       PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD,file[3],FILE_MODE_READ,&viewer));
403       PetscCall(VecCreate(PETSC_COMM_WORLD,&xstar));
404       PetscCall(VecLoad(xstar,viewer));
405       PetscCall(VecAXPY(xstar, -1.0, x));
406       PetscCall(VecNorm(xstar, NORM_2, &norm));
407       PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Error norm %g\n", (double)norm));
408       PetscCall(VecDestroy(&xstar));
409       PetscCall(PetscViewerDestroy(&viewer));
410     }
411     if (outputSoln) {
412       PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD,"solution.petsc",FILE_MODE_WRITE,&viewer));
413       PetscCall(VecView(x, viewer));
414       PetscCall(PetscViewerDestroy(&viewer));
415     }
416 
417     flg  = PETSC_FALSE;
418     PetscCall(PetscOptionsGetBool(NULL,NULL, "-ksp_reason", &flg,NULL));
419     if (flg) {
420       KSPConvergedReason reason;
421       PetscCall(KSPGetConvergedReason(ksp,&reason));
422       PetscCall(PetscPrintf(PETSC_COMM_WORLD,"KSPConvergedReason: %s\n", KSPConvergedReasons[reason]));
423     }
424 
425   }   /* while (num_numfac--) */
426 
427   /*
428      Free work space.  All PETSc objects should be destroyed when they
429      are no longer needed.
430   */
431   PetscCall(MatDestroy(&A)); PetscCall(VecDestroy(&b));
432   PetscCall(VecDestroy(&u)); PetscCall(VecDestroy(&x));
433   PetscCall(KSPDestroy(&ksp));
434   PetscPreLoadEnd();
435   /* -----------------------------------------------------------
436                       End of linear solver loop
437      ----------------------------------------------------------- */
438 
439   PetscCall(PetscFinalize());
440   return 0;
441 }
442 
443 /*TEST
444 
445    build:
446       requires: !complex
447 
448    testset:
449       suffix: 1
450       nsize: 2
451       args: -f0 ${wPETSC_DIR}/share/petsc/datafiles/matrices/spd-real-int@PETSC_INDEX_SIZE@-float@PETSC_SCALAR_SIZE@
452       requires: !__float128
453 
454    testset:
455       suffix: 1a
456       args: -f0 ${wPETSC_DIR}/share/petsc/datafiles/matrices/spd-real-int@PETSC_INDEX_SIZE@-float@PETSC_SCALAR_SIZE@
457       requires: !__float128
458 
459    testset:
460       nsize: 2
461       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
462       args: -f0 ${DATAFILESPATH}/matrices/medium
463       args:  -ksp_type bicg
464       test:
465          suffix: 2
466 
467    testset:
468       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
469       args: -f0 ${DATAFILESPATH}/matrices/medium
470       args: -ksp_type bicg
471       test:
472          suffix: 4
473          args: -pc_type lu
474       test:
475          suffix: 5
476 
477    testset:
478       suffix: 6
479       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
480       args: -f0 ${DATAFILESPATH}/matrices/fem1
481       args: -pc_factor_levels 2 -pc_factor_fill 1.73 -ksp_gmres_cgs_refinement_type refine_always
482 
483    testset:
484       TODO: Matrix row/column sizes are not compatible with block size
485       suffix: 7
486       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
487       args: -f0 ${DATAFILESPATH}/matrices/medium
488       args: -viewer_binary_skip_info -mat_type seqbaij
489       args: -matload_block_size {{2 3 4 5 6 7 8}separate output}
490       args: -ksp_max_it 100 -ksp_gmres_cgs_refinement_type refine_always
491       args: -ksp_rtol 1.0e-15 -ksp_monitor_short
492       test:
493          suffix: a
494       test:
495          suffix: b
496          args: -pc_factor_mat_ordering_type nd
497       test:
498          suffix: c
499          args: -pc_factor_levels 1
500       test:
501          requires: metis
502          suffix: d
503          args: -pc_factor_mat_ordering_type metisnd
504 
505    testset:
506       TODO: Matrix row/column sizes are not compatible with block size
507       suffix: 7_d
508       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
509       args: -f0 ${DATAFILESPATH}/matrices/medium
510       args: -viewer_binary_skip_info -mat_type seqbaij
511       args: -matload_block_size {{2 3 4 5 6 7 8}shared output}
512       args: -ksp_type preonly -pc_type lu
513 
514    testset:
515       suffix: 8
516       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
517       args: -f0 ${DATAFILESPATH}/matrices/medium
518       args: -ksp_diagonal_scale -pc_type eisenstat -ksp_monitor_short -ksp_diagonal_scale_fix -ksp_gmres_cgs_refinement_type refine_always -mat_no_inode
519 
520    testset:
521       TODO: Matrix row/column sizes are not compatible with block size
522       suffix: 9
523       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
524       args: -f0 ${DATAFILESPATH}/matrices/medium
525       args: -viewer_binary_skip_info  -matload_block_size {{1 2 3 4 5 6 7}separate output} -ksp_max_it 100 -ksp_gmres_cgs_refinement_type refine_always -ksp_rtol 1.0e-15 -ksp_monitor_short
526       test:
527          suffix: a
528          args: -mat_type seqbaij
529       test:
530          suffix: b
531          args: -mat_type seqbaij -trans
532       test:
533          suffix: c
534          nsize: 2
535          args: -mat_type mpibaij
536       test:
537          suffix: d
538          nsize: 2
539          args: -mat_type mpibaij -trans
540       test:
541          suffix: e
542          nsize: 3
543          args: -mat_type mpibaij
544       test:
545          suffix: f
546          nsize: 3
547          args: -mat_type mpibaij -trans
548 
549    testset:
550       suffix: 10
551       nsize: 2
552       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
553       args: -ksp_type fgmres -pc_type ksp -f0 ${DATAFILESPATH}/matrices/medium -ksp_fgmres_modifypcksp -ksp_monitor_short
554 
555    testset:
556       suffix: 12
557       requires: matlab
558       args: -pc_type lu -pc_factor_mat_solver_type matlab -f0 ${DATAFILESPATH}/matrices/arco1
559 
560    testset:
561       suffix: 13
562       requires: lusol
563       args: -f0 ${DATAFILESPATH}/matrices/arco1
564       args: -mat_type lusol -pc_type lu
565 
566    testset:
567       nsize: 3
568       args: -f0 ${DATAFILESPATH}/matrices/medium
569       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
570       test:
571          suffix: 14
572          requires: spai
573          args: -pc_type spai
574       test:
575          suffix: 15
576          requires: hypre !defined(PETSC_HAVE_HYPRE_DEVICE)
577          args: -pc_type hypre -pc_hypre_type pilut
578       test:
579          suffix: 16
580          requires: hypre !defined(PETSC_HAVE_HYPRE_DEVICE)
581          args: -pc_type hypre -pc_hypre_type parasails
582       test:
583          suffix: 17
584          requires: hypre !defined(PETSC_HAVE_HYPRE_DEVICE)
585          args: -pc_type hypre -pc_hypre_type boomeramg
586       test:
587          suffix: 18
588          requires: hypre !defined(PETSC_HAVE_HYPRE_DEVICE)
589          args: -pc_type hypre -pc_hypre_type euclid
590 
591    testset:
592       suffix: 19
593       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
594       args: -f0 ${DATAFILESPATH}/matrices/poisson1
595       args: -ksp_type cg -pc_type icc
596       args: -pc_factor_levels {{0 2 4}separate output}
597       test:
598       test:
599          args: -mat_type seqsbaij
600 
601    testset:
602       suffix: ILU
603       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
604       args: -f0 ${DATAFILESPATH}/matrices/small
605       args: -pc_factor_levels 1
606       test:
607       test:
608          # This is tested against regular ILU (used to be denoted ILUBAIJ)
609          args: -mat_type baij
610 
611    testset:
612       suffix: aijcusparse
613       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) cuda
614       args: -f0 ${DATAFILESPATH}/matrices/medium -ksp_monitor_short -ksp_view -mat_view ascii::ascii_info -mat_type aijcusparse -pc_factor_mat_solver_type cusparse -pc_type ilu -vec_type cuda
615 
616    testset:
617       TODO: No output file. Need to determine if deprecated
618       suffix: asm_viennacl
619       nsize: 2
620       requires: viennacl
621       args: -pc_type asm -pc_asm_sub_mat_type aijviennacl -f0 ${wPETSC_DIR}/share/petsc/datafiles/matrices/spd-real-int${PETSC_INDEX_SIZE}-float${PETSC_SCALAR_SIZE}
622 
623    testset:
624       nsize: 2
625       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) hypre !defined(PETSC_HAVE_HYPRE_DEVICE)
626       args: -f0 ${DATAFILESPATH}/matrices/poisson2.gz -ksp_monitor_short -ksp_rtol 1.E-9 -pc_type hypre -pc_hypre_type boomeramg
627       test:
628          suffix: boomeramg_euclid
629          args: -pc_hypre_boomeramg_smooth_type Euclid -pc_hypre_boomeramg_smooth_num_levels 2 -pc_hypre_boomeramg_eu_level 1 -pc_hypre_boomeramg_eu_droptolerance 0.01
630          TODO: Need to determine if deprecated
631       test:
632          suffix: boomeramg_euclid_bj
633          args: -pc_hypre_boomeramg_smooth_type Euclid -pc_hypre_boomeramg_smooth_num_levels 2 -pc_hypre_boomeramg_eu_level 1 -pc_hypre_boomeramg_eu_droptolerance 0.01 -pc_hypre_boomeramg_eu_bj
634          TODO: Need to determine if deprecated
635       test:
636          suffix: boomeramg_parasails
637          args: -pc_hypre_boomeramg_smooth_type ParaSails -pc_hypre_boomeramg_smooth_num_levels 2
638       test:
639          suffix: boomeramg_pilut
640          args: -pc_hypre_boomeramg_smooth_type Pilut -pc_hypre_boomeramg_smooth_num_levels 2
641       test:
642          suffix: boomeramg_schwarz
643          args: -pc_hypre_boomeramg_smooth_type Schwarz-smoothers
644 
645    testset:
646       suffix: cg_singlereduction
647       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
648       args: -f0 ${DATAFILESPATH}/matrices/small
649       args: -mat_type mpisbaij -ksp_type cg -pc_type eisenstat -ksp_monitor_short -ksp_converged_reason
650       test:
651       test:
652          args: -ksp_cg_single_reduction
653 
654    testset:
655       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
656       args: -f0 ${DATAFILESPATH}/matrices/poisson2.gz
657       args: -ksp_monitor_short -pc_type icc
658       test:
659          suffix: cr
660          args: -ksp_type cr
661       test:
662          suffix: lcd
663          args: -ksp_type lcd
664 
665    testset:
666       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
667       args: -f0 ${DATAFILESPATH}/matrices/small
668       args: -ksp_monitor_short -ksp_view -mat_view ascii::ascii_info
669       test:
670          suffix: seqaijcrl
671          args: -mat_type seqaijcrl
672       test:
673          suffix: seqaijperm
674          args: -mat_type seqaijperm
675 
676    testset:
677       nsize: 2
678       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
679       args: -f0 ${DATAFILESPATH}/matrices/small
680       args: -ksp_monitor_short -ksp_view
681       # Different output files
682       test:
683          suffix: mpiaijcrl
684          args: -mat_type mpiaijcrl
685       test:
686          suffix: mpiaijperm
687          args: -mat_type mpiaijperm
688 
689    testset:
690       nsize: 4
691       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) !defined(PETSC_HAVE_I_MPI_NUMVERSION)
692       args: -ksp_monitor_short -ksp_view
693       test:
694          suffix: xxt
695          args: -f0 ${DATAFILESPATH}/matrices/poisson1 -check_symmetry -ksp_type cg -pc_type tfs
696       test:
697          suffix: xyt
698          args: -f0 ${DATAFILESPATH}/matrices/medium -ksp_type gmres -pc_type tfs
699 
700    testset:
701       # The output file here is the same as mumps
702       suffix: mumps_cholesky
703       output_file: output/ex72_mumps.out
704       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) mumps
705       args: -f0 ${DATAFILESPATH}/matrices/small -ksp_type preonly -pc_type cholesky -pc_factor_mat_solver_type mumps -num_numfac 2 -num_rhs 2
706       nsize: {{1 2}}
707       test:
708          args: -mat_type sbaij -mat_ignore_lower_triangular
709       test:
710          args: -mat_type aij
711       test:
712          args: -mat_type aij -matload_spd
713 
714    testset:
715       # The output file here is the same as mumps
716       suffix: mumps_lu
717       output_file: output/ex72_mumps.out
718       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) mumps
719       args: -f0 ${DATAFILESPATH}/matrices/small -ksp_type preonly -pc_type lu -pc_factor_mat_solver_type mumps -num_numfac 2 -num_rhs 2
720       test:
721          args: -mat_type seqaij
722       test:
723          nsize: 2
724          args: -mat_type mpiaij
725       test:
726          args: -mat_type seqbaij -matload_block_size 2
727       test:
728          nsize: 2
729          args: -mat_type mpibaij -matload_block_size 2
730       test:
731          args: -mat_type aij -mat_mumps_icntl_7 5
732          TODO: Need to determine if deprecated
733 
734    test:
735       suffix: mumps_lu_parmetis
736       output_file: output/ex72_mumps.out
737       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) mumps parmetis
738       nsize: 2
739       args: -f0 ${DATAFILESPATH}/matrices/small -ksp_type preonly -pc_type lu -pc_factor_mat_solver_type mumps -num_numfac 2 -num_rhs 2 -mat_type mpiaij -mat_mumps_icntl_28 2 -mat_mumps_icntl_29 2
740 
741    test:
742       suffix: mumps_lu_ptscotch
743       output_file: output/ex72_mumps.out
744       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) mumps ptscotch
745       nsize: 2
746       args: -f0 ${DATAFILESPATH}/matrices/small -ksp_type preonly -pc_type lu -pc_factor_mat_solver_type mumps -num_numfac 2 -num_rhs 2 -mat_type mpiaij -mat_mumps_icntl_28 2 -mat_mumps_icntl_29 1
747 
748    testset:
749       # The output file here is the same as mumps
750       suffix: mumps_redundant
751       output_file: output/ex72_mumps_redundant.out
752       nsize: 8
753       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) mumps
754       args: -f0 ${DATAFILESPATH}/matrices/medium -ksp_type preonly -pc_type redundant -pc_redundant_number {{8 7 6 5 4 3 2 1}} -redundant_pc_factor_mat_solver_type mumps -num_numfac 2 -num_rhs 2
755 
756    testset:
757       suffix: pastix_cholesky
758       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) pastix
759       output_file: output/ex72_mumps.out
760       nsize: {{1 2}}
761       args: -f0 ${DATAFILESPATH}/matrices/small -ksp_type preonly -pc_factor_mat_solver_type pastix -num_numfac 2 -num_rhs 2 -pc_type cholesky -mat_type sbaij -mat_ignore_lower_triangular
762 
763    testset:
764       suffix: pastix_lu
765       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) pastix
766       args: -f0 ${DATAFILESPATH}/matrices/small -ksp_type preonly -pc_type lu -pc_factor_mat_solver_type pastix -num_numfac 2 -num_rhs 2
767       output_file: output/ex72_mumps.out
768       test:
769          args: -mat_type seqaij
770       test:
771          nsize: 2
772          args: -mat_type mpiaij
773 
774    testset:
775       suffix: pastix_redundant
776       output_file: output/ex72_mumps_redundant.out
777       nsize: 8
778       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) pastix
779       args: -f0 ${DATAFILESPATH}/matrices/medium -ksp_type preonly -pc_type redundant -pc_redundant_number {{8 7 6 5 4 3 2 1}} -redundant_pc_factor_mat_solver_type pastix -num_numfac 2 -num_rhs 2
780 
781    testset:
782       suffix: superlu_dist_lu
783       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) superlu_dist
784       output_file: output/ex72_mumps.out
785       args: -f0 ${DATAFILESPATH}/matrices/small -ksp_type preonly -pc_type lu -pc_factor_mat_solver_type superlu_dist -num_numfac 2 -num_rhs 2
786       nsize: {{1 2}}
787 
788    testset:
789       suffix: superlu_dist_redundant
790       nsize: 8
791       output_file: output/ex72_mumps_redundant.out
792       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) superlu_dist
793       args: -f0 ${DATAFILESPATH}/matrices/medium -ksp_type preonly -pc_type redundant -pc_redundant_number {{8 7 6 5 4 3 2 1}} -redundant_pc_factor_mat_solver_type superlu_dist -num_numfac 2 -num_rhs 2
794 
795    testset:
796       suffix: superlu_lu
797       output_file: output/ex72_mumps.out
798       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) superlu
799       args: -f0 ${DATAFILESPATH}/matrices/small -ksp_type preonly -pc_type lu -pc_factor_mat_solver_type superlu -num_numfac 2 -num_rhs 2
800 
801    testset:
802       suffix: umfpack
803       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) suitesparse
804       args: -f0 ${DATAFILESPATH}/matrices/small -ksp_type preonly -pc_type lu -mat_type seqaij -pc_factor_mat_solver_type umfpack -num_numfac 2 -num_rhs 2
805 
806    testset:
807       suffix: zeropivot
808       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) mumps
809       args: -f0 ${DATAFILESPATH}/matrices/small -test_zeropivot -ksp_converged_reason -ksp_type fgmres -pc_type ksp
810       test:
811          nsize: 3
812          args: -ksp_pc_type bjacobi
813       test:
814          nsize: 2
815          args: -ksp_ksp_type cg -ksp_pc_type bjacobi -ksp_pc_bjacobi_blocks 1
816       #test:
817          #nsize: 3
818          #args: -ksp_ksp_converged_reason -ksp_pc_type bjacobi -ksp_sub_ksp_converged_reason
819          #TODO: Need to determine if deprecated
820 
821    testset:
822       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
823       args: -mat_convert_type is -f0 ${DATAFILESPATH}/matrices/medium -ksp_type fgmres
824       test:
825          suffix: aij_gdsw
826          nsize: 4
827          args: -mat_convert_type aij -pc_type mg -pc_mg_levels 2 -pc_mg_adapt_interp_coarse_space gdsw -pc_mg_galerkin -mg_levels_pc_type asm
828       test:
829          output_file: output/ex72_aij_gdsw.out
830          suffix: is_gdsw
831          nsize: 4
832          args: -pc_type mg -pc_mg_levels 2  -pc_mg_adapt_interp_coarse_space gdsw -pc_mg_galerkin -mg_levels_pc_type asm
833       test:
834          suffix: is_asm
835          nsize: {{1 2}separate output}
836          args: -pc_type asm
837       test:
838          suffix: bddc_seq
839          nsize: 1
840          args: -pc_type bddc
841       test:
842          suffix: bddc_par
843          nsize: 2
844          args: -pc_type bddc
845       test:
846          requires: parmetis
847          suffix: bddc_par_nd_parmetis
848          filter: sed -e "s/Number of iterations =   [0-9]/Number of iterations = 9/g"
849          nsize: 4
850          args: -ksp_error_if_not_converged -pc_type bddc -mat_is_disassemble_l2g_type nd -mat_partitioning_type parmetis
851       test:
852          requires: ptscotch defined(PETSC_HAVE_SCOTCH_PARMETIS_V3_NODEND)
853          suffix: bddc_par_nd_ptscotch
854          filter: sed -e "s/Number of iterations =   [0-9]/Number of iterations = 9/g"
855          nsize: 4
856          args: -ksp_error_if_not_converged -pc_type bddc -mat_is_disassemble_l2g_type nd -mat_partitioning_type ptscotch
857 
858    testset:
859       requires: !__float128 hpddm slepc defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
860       test:
861          suffix: hpddm_mat
862          output_file: output/ex72_bddc_seq.out
863          filter: sed -e "s/Number of iterations =   2/Number of iterations =   1/g"
864          nsize: 2
865          args: -f0 ${wPETSC_DIR}/share/petsc/datafiles/matrices/spd-real-int@PETSC_INDEX_SIZE@-float@PETSC_SCALAR_SIZE@ -pc_type hpddm -pc_hpddm_define_subdomains -pc_hpddm_levels_1_sub_pc_type cholesky -pc_hpddm_levels_1_eps_nev 5 -pc_hpddm_levels_1_st_pc_type mat
866       test:
867          requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
868          suffix: hpddm_gen_non_hermitian
869          output_file: output/ex72_2.out
870          nsize: 4
871          args: -f0 ${DATAFILESPATH}/matrices/arco1 -pc_type hpddm -pc_hpddm_define_subdomains -pc_hpddm_levels_1_sub_pc_type lu -pc_hpddm_levels_1_eps_nev 10 -pc_hpddm_levels_1_st_share_sub_ksp -pc_hpddm_levels_1_eps_gen_non_hermitian -pc_hpddm_coarse_mat_type baij -pc_hpddm_block_splitting -pc_hpddm_levels_1_eps_threshold 0.7 -pc_hpddm_coarse_pc_type lu -ksp_pc_side right
872       test:
873          requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) mumps !defined(PETSCTEST_VALGRIND)
874          suffix: hpddm_gen_non_hermitian_baij
875          output_file: output/ex72_5.out
876          nsize: 4
877          timeoutfactor: 2
878          args: -f0 ${DATAFILESPATH}/matrices/arco6 -pc_type hpddm -pc_hpddm_define_subdomains -pc_hpddm_levels_1_sub_pc_type lu -pc_hpddm_levels_1_eps_nev 30 -pc_hpddm_levels_1_st_share_sub_ksp -pc_hpddm_levels_1_eps_gen_non_hermitian -pc_hpddm_coarse_mat_type baij -pc_hpddm_block_splitting -pc_hpddm_levels_1_eps_threshold 0.8 -pc_hpddm_coarse_pc_type lu -ksp_pc_side right -mat_type baij -pc_hpddm_levels_1_sub_pc_factor_mat_solver_type mumps -pc_hpddm_levels_1_eps_tol 1.0e-2
879 TEST*/
880