xref: /petsc/src/ksp/ksp/tutorials/ex72.c (revision dbbe0bcd3f3a8fbab5a45420dc06f8387e5764c6)
1 
/* Help text printed by -help; also documents this example's input options.
   NOTE: this is a single continued string literal — the line continuations
   (backslash-newline) must not introduce any stray characters, since every
   character between them becomes part of the runtime help output. */
static char help[] = "Reads a PETSc matrix and vector from a file and solves a linear system.\n\
This version first preloads and solves a small system, then loads \n\
another (larger) system and solves it as well.  This example illustrates\n\
preloading of instructions with the smaller system so that more accurate\n\
performance monitoring can be done with the larger one (that actually\n\
is the system of interest).  See the 'Performance Hints' chapter of the\n\
users manual for a discussion of preloading.  Input parameters include\n\
  -f0 <input_file> : first file to load (small system)\n\
  -f1 <input_file> : second file to load (larger system)\n\n\
  -nearnulldim <0> : number of vectors in the near-null space immediately following matrix\n\n\
  -trans  : solve transpose system instead\n\n";
/*
  This code can be used to test the PETSc interface to other packages.
  Examples of command line options:
   ./ex72 -f0 <datafile> -ksp_type preonly
        -help -ksp_view
        -num_numfac <num_numfac> -num_rhs <num_rhs>
        -ksp_type preonly -pc_type lu -pc_factor_mat_solver_type superlu or superlu_dist or mumps
        -ksp_type preonly -pc_type cholesky -pc_factor_mat_solver_type mumps
   mpiexec -n <np> ./ex72 -f0 <datafile> -ksp_type cg -pc_type asm -pc_asm_type basic -sub_pc_type icc -mat_type sbaij
*/
24 
/*
  Include "petscksp.h" so that we can use KSP solvers.  Note that this file
  automatically includes:
     petscsys.h    - base PETSc routines     petscvec.h - vectors
     petscmat.h    - matrices
     petscis.h     - index sets              petscksp.h - Krylov subspace methods
     petscviewer.h - viewers                 petscpc.h  - preconditioners
*/
#include <petscksp.h>
34 
35 int main(int argc,char **args)
36 {
37   KSP            ksp;             /* linear solver context */
38   Mat            A;               /* matrix */
39   Vec            x,b,u;           /* approx solution, RHS, exact solution */
40   PetscViewer    viewer;          /* viewer */
41   char           file[4][PETSC_MAX_PATH_LEN];     /* input file name */
42   PetscBool      table     =PETSC_FALSE,flg,trans=PETSC_FALSE,initialguess = PETSC_FALSE;
43   PetscBool      outputSoln=PETSC_FALSE,constantnullspace = PETSC_FALSE;
44   PetscInt       its,num_numfac,m,n,M,p,nearnulldim = 0;
45   PetscReal      norm;
46   PetscBool      preload=PETSC_TRUE,isSymmetric,cknorm=PETSC_FALSE,initialguessfile = PETSC_FALSE;
47   PetscMPIInt    rank;
48   char           initialguessfilename[PETSC_MAX_PATH_LEN];
49   char           mtype[PETSC_MAX_PATH_LEN];
50 
51   PetscFunctionBeginUser;
52   PetscCall(PetscInitialize(&argc,&args,(char*)0,help));
53   PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD,&rank));
54   PetscCall(PetscOptionsGetBool(NULL,NULL,"-table",&table,NULL));
55   PetscCall(PetscOptionsGetBool(NULL,NULL,"-constantnullspace",&constantnullspace,NULL));
56   PetscCall(PetscOptionsGetBool(NULL,NULL,"-trans",&trans,NULL));
57   PetscCall(PetscOptionsGetBool(NULL,NULL,"-initialguess",&initialguess,NULL));
58   PetscCall(PetscOptionsGetBool(NULL,NULL,"-output_solution",&outputSoln,NULL));
59   PetscCall(PetscOptionsGetString(NULL,NULL,"-initialguessfilename",initialguessfilename,sizeof(initialguessfilename),&initialguessfile));
60   PetscCall(PetscOptionsGetInt(NULL,NULL,"-nearnulldim",&nearnulldim,NULL));
61 
62   /*
63      Determine files from which we read the two linear systems
64      (matrix and right-hand-side vector).
65   */
66   PetscCall(PetscOptionsGetString(NULL,NULL,"-f",file[0],sizeof(file[0]),&flg));
67   if (flg) {
68     PetscCall(PetscStrcpy(file[1],file[0]));
69     preload = PETSC_FALSE;
70   } else {
71     PetscCall(PetscOptionsGetString(NULL,NULL,"-f0",file[0],sizeof(file[0]),&flg));
72     PetscCheck(flg,PETSC_COMM_WORLD,PETSC_ERR_USER_INPUT,"Must indicate binary file with the -f0 or -f option");
73     PetscCall(PetscOptionsGetString(NULL,NULL,"-f1",file[1],sizeof(file[1]),&flg));
74     if (!flg) preload = PETSC_FALSE;   /* don't bother with second system */
75   }
76 
77   /* -----------------------------------------------------------
78                   Beginning of linear solver loop
79      ----------------------------------------------------------- */
80   /*
81      Loop through the linear solve 2 times.
82       - The intention here is to preload and solve a small system;
83         then load another (larger) system and solve it as well.
84         This process preloads the instructions with the smaller
85         system so that more accurate performance monitoring (via
86         -log_view) can be done with the larger one (that actually
87         is the system of interest).
88   */
89   PetscPreLoadBegin(preload,"Load system");
90 
91   /* - - - - - - - - - - - New Stage - - - - - - - - - - - - -
92                          Load system
93    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
94 
95   /*
96      Open binary file.  Note that we use FILE_MODE_READ to indicate
97      reading from this file.
98   */
99   PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD,file[PetscPreLoadIt],FILE_MODE_READ,&viewer));
100 
101   /*
102      Load the matrix and vector; then destroy the viewer.
103   */
104   PetscCall(MatCreate(PETSC_COMM_WORLD,&A));
105   PetscCall(MatSetFromOptions(A));
106   PetscCall(MatLoad(A,viewer));
107 
108   PetscCall(PetscOptionsGetString(NULL,NULL,"-mat_convert_type",mtype,sizeof(mtype),&flg));
109   if (flg) PetscCall(MatConvert(A,mtype,MAT_INPLACE_MATRIX,&A));
110 
111   if (nearnulldim) {
112     MatNullSpace nullsp;
113     Vec          *nullvecs;
114     PetscInt     i;
115     PetscCall(PetscMalloc1(nearnulldim,&nullvecs));
116     for (i=0; i<nearnulldim; i++) {
117       PetscCall(VecCreate(PETSC_COMM_WORLD,&nullvecs[i]));
118       PetscCall(VecLoad(nullvecs[i],viewer));
119     }
120     PetscCall(MatNullSpaceCreate(PETSC_COMM_WORLD,PETSC_FALSE,nearnulldim,nullvecs,&nullsp));
121     PetscCall(MatSetNearNullSpace(A,nullsp));
122     for (i=0; i<nearnulldim; i++) PetscCall(VecDestroy(&nullvecs[i]));
123     PetscCall(PetscFree(nullvecs));
124     PetscCall(MatNullSpaceDestroy(&nullsp));
125   }
126   if (constantnullspace) {
127     MatNullSpace constant;
128     PetscCall(MatNullSpaceCreate(PETSC_COMM_WORLD,PETSC_TRUE,0,NULL,&constant));
129     PetscCall(MatSetNullSpace(A,constant));
130     PetscCall(MatNullSpaceDestroy(&constant));
131   }
132   flg  = PETSC_FALSE;
133   PetscCall(PetscOptionsGetString(NULL,NULL,"-rhs",file[2],sizeof(file[2]),&flg));
134   PetscCall(VecCreate(PETSC_COMM_WORLD,&b));
135   if (flg) {   /* rhs is stored in a separate file */
136     if (file[2][0] == '0' || file[2][0] == 0) {
137       PetscInt    m;
138       PetscScalar one = 1.0;
139       PetscCall(PetscInfo(0,"Using vector of ones for RHS\n"));
140       PetscCall(MatGetLocalSize(A,&m,NULL));
141       PetscCall(VecSetSizes(b,m,PETSC_DECIDE));
142       PetscCall(VecSetFromOptions(b));
143       PetscCall(VecSet(b,one));
144     } else {
145       PetscCall(PetscViewerDestroy(&viewer));
146       PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD,file[2],FILE_MODE_READ,&viewer));
147       PetscCall(VecSetFromOptions(b));
148       PetscCall(VecLoad(b,viewer));
149     }
150   } else {   /* rhs is stored in the same file as matrix */
151     PetscCall(VecSetFromOptions(b));
152     PetscCall(VecLoad(b,viewer));
153   }
154   PetscCall(PetscViewerDestroy(&viewer));
155 
156   /* Make A singular for testing zero-pivot of ilu factorization */
157   /* Example: ./ex72 -f0 <datafile> -test_zeropivot -pc_factor_shift_type <shift_type> */
158   flg  = PETSC_FALSE;
159   PetscCall(PetscOptionsGetBool(NULL,NULL, "-test_zeropivot", &flg,NULL));
160   if (flg) { /* set a row as zeros */
161     PetscInt          row=0;
162     PetscCall(MatSetOption(A,MAT_KEEP_NONZERO_PATTERN,PETSC_TRUE));
163     PetscCall(MatZeroRows(A,1,&row,0.0,NULL,NULL));
164   }
165 
166   /* Check whether A is symmetric, then set A->symmetric option */
167   flg = PETSC_FALSE;
168   PetscCall(PetscOptionsGetBool(NULL,NULL, "-check_symmetry", &flg,NULL));
169   if (flg) {
170     PetscCall(MatIsSymmetric(A,0.0,&isSymmetric));
171     if (!isSymmetric) {
172       PetscCall(PetscPrintf(PETSC_COMM_WORLD,"Warning: A is non-symmetric \n"));
173     }
174   }
175 
176   /*
177      If the loaded matrix is larger than the vector (due to being padded
178      to match the block size of the system), then create a new padded vector.
179   */
180 
181   PetscCall(MatGetLocalSize(A,NULL,&n));
182   PetscCall(MatGetSize(A,&M,NULL));
183   PetscCall(VecGetSize(b,&m));
184   PetscCall(VecGetLocalSize(b,&p));
185   preload = (PetscBool)(M != m || p != n); /* Global or local dimension mismatch */
186   PetscCall(MPIU_Allreduce(&preload,&flg,1,MPIU_BOOL,MPI_LOR,PetscObjectComm((PetscObject)A)));
187   if (flg) { /* Create a new vector b by padding the old one */
188     PetscInt    j,mvec,start,end,indx;
189     Vec         tmp;
190     PetscScalar *bold;
191 
192     PetscCall(VecCreate(PETSC_COMM_WORLD,&tmp));
193     PetscCall(VecSetSizes(tmp,n,PETSC_DECIDE));
194     PetscCall(VecSetFromOptions(tmp));
195     PetscCall(VecGetOwnershipRange(b,&start,&end));
196     PetscCall(VecGetLocalSize(b,&mvec));
197     PetscCall(VecGetArray(b,&bold));
198     for (j=0; j<mvec; j++) {
199       indx = start+j;
200       PetscCall(VecSetValues(tmp,1,&indx,bold+j,INSERT_VALUES));
201     }
202     PetscCall(VecRestoreArray(b,&bold));
203     PetscCall(VecDestroy(&b));
204     PetscCall(VecAssemblyBegin(tmp));
205     PetscCall(VecAssemblyEnd(tmp));
206     b    = tmp;
207   }
208 
209   PetscCall(MatCreateVecs(A,&x,NULL));
210   PetscCall(VecDuplicate(b,&u));
211   if (initialguessfile) {
212     PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD,initialguessfilename,FILE_MODE_READ,&viewer));
213     PetscCall(VecLoad(x,viewer));
214     PetscCall(PetscViewerDestroy(&viewer));
215     initialguess = PETSC_TRUE;
216   } else if (initialguess) {
217     PetscCall(VecSet(x,1.0));
218   } else {
219     PetscCall(VecSet(x,0.0));
220   }
221 
222   /* Check scaling in A */
223   flg  = PETSC_FALSE;
224   PetscCall(PetscOptionsGetBool(NULL,NULL, "-check_scaling", &flg,NULL));
225   if (flg) {
226     Vec       max, min;
227     PetscInt  idx;
228     PetscReal val;
229 
230     PetscCall(VecDuplicate(x, &max));
231     PetscCall(VecDuplicate(x, &min));
232     PetscCall(MatGetRowMaxAbs(A, max, NULL));
233     PetscCall(MatGetRowMinAbs(A, min, NULL));
234     {
235       PetscCall(PetscViewerASCIIOpen(PETSC_COMM_WORLD, "max.data", &viewer));
236       PetscCall(VecView(max, viewer));
237       PetscCall(PetscViewerDestroy(&viewer));
238       PetscCall(PetscViewerASCIIOpen(PETSC_COMM_WORLD, "min.data", &viewer));
239       PetscCall(VecView(min, viewer));
240       PetscCall(PetscViewerDestroy(&viewer));
241     }
242     PetscCall(VecView(max, PETSC_VIEWER_DRAW_WORLD));
243     PetscCall(VecMax(max, &idx, &val));
244     PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Largest max row element %g at row %" PetscInt_FMT "\n", (double)val, idx));
245     PetscCall(VecView(min, PETSC_VIEWER_DRAW_WORLD));
246     PetscCall(VecMin(min, &idx, &val));
247     PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Smallest min row element %g at row %" PetscInt_FMT "\n", (double)val, idx));
248     PetscCall(VecMin(max, &idx, &val));
249     PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Smallest max row element %g at row %" PetscInt_FMT "\n", (double)val, idx));
250     PetscCall(VecPointwiseDivide(max, max, min));
251     PetscCall(VecMax(max, &idx, &val));
252     PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Largest row ratio %g at row %" PetscInt_FMT "\n", (double)val, idx));
253     PetscCall(VecView(max, PETSC_VIEWER_DRAW_WORLD));
254     PetscCall(VecDestroy(&max));
255     PetscCall(VecDestroy(&min));
256   }
257 
258   /*  PetscCall(MatView(A,PETSC_VIEWER_STDOUT_WORLD)); */
259   /* - - - - - - - - - - - New Stage - - - - - - - - - - - - -
260                     Setup solve for system
261    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
262   /*
263      Conclude profiling last stage; begin profiling next stage.
264   */
265   PetscPreLoadStage("KSPSetUpSolve");
266 
267   /*
268      Create linear solver; set operators; set runtime options.
269   */
270   PetscCall(KSPCreate(PETSC_COMM_WORLD,&ksp));
271   PetscCall(KSPSetInitialGuessNonzero(ksp,initialguess));
272   num_numfac = 1;
273   PetscCall(PetscOptionsGetInt(NULL,NULL,"-num_numfac",&num_numfac,NULL));
274   while (num_numfac--) {
275     PC        pc;
276     PetscBool lsqr,isbddc,ismatis;
277     char      str[32];
278 
279     PetscCall(PetscOptionsGetString(NULL,NULL,"-ksp_type",str,sizeof(str),&lsqr));
280     if (lsqr) {
281       PetscCall(PetscStrcmp("lsqr",str,&lsqr));
282     }
283     if (lsqr) {
284       Mat BtB;
285       PetscCall(MatTransposeMatMult(A,A,MAT_INITIAL_MATRIX,4,&BtB));
286       PetscCall(KSPSetOperators(ksp,A,BtB));
287       PetscCall(MatDestroy(&BtB));
288     } else {
289       PetscCall(KSPSetOperators(ksp,A,A));
290     }
291     PetscCall(KSPSetFromOptions(ksp));
292 
293     /* if we test BDDC, make sure pmat is of type MATIS */
294     PetscCall(KSPGetPC(ksp,&pc));
295     PetscCall(PetscObjectTypeCompare((PetscObject)pc,PCBDDC,&isbddc));
296     PetscCall(PetscObjectTypeCompare((PetscObject)A,MATIS,&ismatis));
297     if (isbddc && !ismatis) {
298       Mat J;
299 
300       PetscCall(MatConvert(A,MATIS,MAT_INITIAL_MATRIX,&J));
301       PetscCall(KSPSetOperators(ksp,A,J));
302       PetscCall(MatDestroy(&J));
303     }
304 
305     /*
306      Here we explicitly call KSPSetUp() and KSPSetUpOnBlocks() to
307      enable more precise profiling of setting up the preconditioner.
308      These calls are optional, since both will be called within
309      KSPSolve() if they haven't been called already.
310     */
311     PetscCall(KSPSetUp(ksp));
312     PetscCall(KSPSetUpOnBlocks(ksp));
313 
314     /* - - - - - - - - - - - New Stage - - - - - - - - - - - - -
315                          Solve system
316       - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
317 
318     /*
319      Solve linear system;
320     */
321     if (trans) {
322       PetscCall(KSPSolveTranspose(ksp,b,x));
323       PetscCall(KSPGetIterationNumber(ksp,&its));
324     } else {
325       PetscInt num_rhs=1;
326       PetscCall(PetscOptionsGetInt(NULL,NULL,"-num_rhs",&num_rhs,NULL));
327       cknorm = PETSC_FALSE;
328       PetscCall(PetscOptionsGetBool(NULL,NULL,"-cknorm",&cknorm,NULL));
329       while (num_rhs--) {
330         if (num_rhs == 1) VecSet(x,0.0);
331         PetscCall(KSPSolve(ksp,b,x));
332       }
333       PetscCall(KSPGetIterationNumber(ksp,&its));
334       if (cknorm) {     /* Check error for each rhs */
335         if (trans) {
336           PetscCall(MatMultTranspose(A,x,u));
337         } else {
338           PetscCall(MatMult(A,x,u));
339         }
340         PetscCall(VecAXPY(u,-1.0,b));
341         PetscCall(VecNorm(u,NORM_2,&norm));
342         PetscCall(PetscPrintf(PETSC_COMM_WORLD,"  Number of iterations = %3" PetscInt_FMT "\n",its));
343         if (!PetscIsNanScalar(norm)) {
344           if (norm < 1.e-12) {
345             PetscCall(PetscPrintf(PETSC_COMM_WORLD,"  Residual norm < 1.e-12\n"));
346           } else {
347             PetscCall(PetscPrintf(PETSC_COMM_WORLD,"  Residual norm %g\n",(double)norm));
348           }
349         }
350       }
351     }   /* while (num_rhs--) */
352 
353     /* - - - - - - - - - - - New Stage - - - - - - - - - - - - -
354           Check error, print output, free data structures.
355      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
356 
357     /*
358        Check error
359     */
360     if (trans) {
361       PetscCall(MatMultTranspose(A,x,u));
362     } else {
363       PetscCall(MatMult(A,x,u));
364     }
365     PetscCall(VecAXPY(u,-1.0,b));
366     PetscCall(VecNorm(u,NORM_2,&norm));
367     /*
368      Write output (optionally using table for solver details).
369       - PetscPrintf() handles output for multiprocessor jobs
370         by printing from only one processor in the communicator.
371       - KSPView() prints information about the linear solver.
372     */
373     if (table) {
374       char        *matrixname,kspinfo[120];
375 
376       /*
377        Open a string viewer; then write info to it.
378       */
379       PetscCall(PetscViewerStringOpen(PETSC_COMM_WORLD,kspinfo,sizeof(kspinfo),&viewer));
380       PetscCall(KSPView(ksp,viewer));
381       PetscCall(PetscStrrchr(file[PetscPreLoadIt],'/',&matrixname));
382       PetscCall(PetscPrintf(PETSC_COMM_WORLD,"%-8.8s %3" PetscInt_FMT " %2.0e %s \n",matrixname,its,(double)norm,kspinfo));
383 
384       /*
385         Destroy the viewer
386       */
387       PetscCall(PetscViewerDestroy(&viewer));
388     } else {
389       PetscCall(PetscPrintf(PETSC_COMM_WORLD,"Number of iterations = %3" PetscInt_FMT "\n",its));
390       if (!PetscIsNanScalar(norm)) {
391         if (norm < 1.e-12 && !PetscIsNanScalar((PetscScalar)norm)) {
392           PetscCall(PetscPrintf(PETSC_COMM_WORLD,"  Residual norm < 1.e-12\n"));
393         } else {
394           PetscCall(PetscPrintf(PETSC_COMM_WORLD,"Residual norm %g\n",(double)norm));
395         }
396       }
397     }
398     PetscCall(PetscOptionsGetString(NULL,NULL,"-solution",file[3],sizeof(file[3]),&flg));
399     if (flg) {
400       Vec         xstar;
401       PetscReal   norm;
402 
403       PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD,file[3],FILE_MODE_READ,&viewer));
404       PetscCall(VecCreate(PETSC_COMM_WORLD,&xstar));
405       PetscCall(VecLoad(xstar,viewer));
406       PetscCall(VecAXPY(xstar, -1.0, x));
407       PetscCall(VecNorm(xstar, NORM_2, &norm));
408       PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Error norm %g\n", (double)norm));
409       PetscCall(VecDestroy(&xstar));
410       PetscCall(PetscViewerDestroy(&viewer));
411     }
412     if (outputSoln) {
413       PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD,"solution.petsc",FILE_MODE_WRITE,&viewer));
414       PetscCall(VecView(x, viewer));
415       PetscCall(PetscViewerDestroy(&viewer));
416     }
417 
418     flg  = PETSC_FALSE;
419     PetscCall(PetscOptionsGetBool(NULL,NULL, "-ksp_reason", &flg,NULL));
420     if (flg) {
421       KSPConvergedReason reason;
422       PetscCall(KSPGetConvergedReason(ksp,&reason));
423       PetscCall(PetscPrintf(PETSC_COMM_WORLD,"KSPConvergedReason: %s\n", KSPConvergedReasons[reason]));
424     }
425 
426   }   /* while (num_numfac--) */
427 
428   /*
429      Free work space.  All PETSc objects should be destroyed when they
430      are no longer needed.
431   */
432   PetscCall(MatDestroy(&A)); PetscCall(VecDestroy(&b));
433   PetscCall(VecDestroy(&u)); PetscCall(VecDestroy(&x));
434   PetscCall(KSPDestroy(&ksp));
435   PetscPreLoadEnd();
436   /* -----------------------------------------------------------
437                       End of linear solver loop
438      ----------------------------------------------------------- */
439 
440   PetscCall(PetscFinalize());
441   return 0;
442 }
443 
444 /*TEST
445 
446    build:
447       requires: !complex
448 
449    testset:
450       suffix: 1
451       nsize: 2
452       args: -f0 ${wPETSC_DIR}/share/petsc/datafiles/matrices/spd-real-int@PETSC_INDEX_SIZE@-float@PETSC_SCALAR_SIZE@
453       requires: !__float128
454 
455    testset:
456       suffix: 1a
457       args: -f0 ${wPETSC_DIR}/share/petsc/datafiles/matrices/spd-real-int@PETSC_INDEX_SIZE@-float@PETSC_SCALAR_SIZE@
458       requires: !__float128
459 
460    testset:
461       nsize: 2
462       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
463       args: -f0 ${DATAFILESPATH}/matrices/medium
464       args:  -ksp_type bicg
465       test:
466          suffix: 2
467 
468    testset:
469       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
470       args: -f0 ${DATAFILESPATH}/matrices/medium
471       args: -ksp_type bicg
472       test:
473          suffix: 4
474          args: -pc_type lu
475       test:
476          suffix: 5
477 
478    testset:
479       suffix: 6
480       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
481       args: -f0 ${DATAFILESPATH}/matrices/fem1
482       args: -pc_factor_levels 2 -pc_factor_fill 1.73 -ksp_gmres_cgs_refinement_type refine_always
483 
484    testset:
485       TODO: Matrix row/column sizes are not compatible with block size
486       suffix: 7
487       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
488       args: -f0 ${DATAFILESPATH}/matrices/medium
489       args: -viewer_binary_skip_info -mat_type seqbaij
490       args: -matload_block_size {{2 3 4 5 6 7 8}separate output}
491       args: -ksp_max_it 100 -ksp_gmres_cgs_refinement_type refine_always
492       args: -ksp_rtol 1.0e-15 -ksp_monitor_short
493       test:
494          suffix: a
495       test:
496          suffix: b
497          args: -pc_factor_mat_ordering_type nd
498       test:
499          suffix: c
500          args: -pc_factor_levels 1
501       test:
502          requires: metis
503          suffix: d
504          args: -pc_factor_mat_ordering_type metisnd
505 
506    testset:
507       TODO: Matrix row/column sizes are not compatible with block size
508       suffix: 7_d
509       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
510       args: -f0 ${DATAFILESPATH}/matrices/medium
511       args: -viewer_binary_skip_info -mat_type seqbaij
512       args: -matload_block_size {{2 3 4 5 6 7 8}shared output}
513       args: -ksp_type preonly -pc_type lu
514 
515    testset:
516       suffix: 8
517       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
518       args: -f0 ${DATAFILESPATH}/matrices/medium
519       args: -ksp_diagonal_scale -pc_type eisenstat -ksp_monitor_short -ksp_diagonal_scale_fix -ksp_gmres_cgs_refinement_type refine_always -mat_no_inode
520 
521    testset:
522       TODO: Matrix row/column sizes are not compatible with block size
523       suffix: 9
524       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
525       args: -f0 ${DATAFILESPATH}/matrices/medium
526       args: -viewer_binary_skip_info  -matload_block_size {{1 2 3 4 5 6 7}separate output} -ksp_max_it 100 -ksp_gmres_cgs_refinement_type refine_always -ksp_rtol 1.0e-15 -ksp_monitor_short
527       test:
528          suffix: a
529          args: -mat_type seqbaij
530       test:
531          suffix: b
532          args: -mat_type seqbaij -trans
533       test:
534          suffix: c
535          nsize: 2
536          args: -mat_type mpibaij
537       test:
538          suffix: d
539          nsize: 2
540          args: -mat_type mpibaij -trans
541       test:
542          suffix: e
543          nsize: 3
544          args: -mat_type mpibaij
545       test:
546          suffix: f
547          nsize: 3
548          args: -mat_type mpibaij -trans
549 
550    testset:
551       suffix: 10
552       nsize: 2
553       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
554       args: -ksp_type fgmres -pc_type ksp -f0 ${DATAFILESPATH}/matrices/medium -ksp_fgmres_modifypcksp -ksp_monitor_short
555 
556    testset:
557       suffix: 12
558       requires: matlab
559       args: -pc_type lu -pc_factor_mat_solver_type matlab -f0 ${DATAFILESPATH}/matrices/arco1
560 
561    testset:
562       suffix: 13
563       requires: lusol
564       args: -f0 ${DATAFILESPATH}/matrices/arco1
565       args: -mat_type lusol -pc_type lu
566 
567    testset:
568       nsize: 3
569       args: -f0 ${DATAFILESPATH}/matrices/medium
570       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
571       test:
572          suffix: 14
573          requires: spai
574          args: -pc_type spai
575       test:
576          suffix: 15
577          requires: hypre !defined(PETSC_HAVE_HYPRE_DEVICE)
578          args: -pc_type hypre -pc_hypre_type pilut
579       test:
580          suffix: 16
581          requires: hypre !defined(PETSC_HAVE_HYPRE_DEVICE)
582          args: -pc_type hypre -pc_hypre_type parasails
583       test:
584          suffix: 17
585          requires: hypre !defined(PETSC_HAVE_HYPRE_DEVICE)
586          args: -pc_type hypre -pc_hypre_type boomeramg
587       test:
588          suffix: 18
589          requires: hypre !defined(PETSC_HAVE_HYPRE_DEVICE)
590          args: -pc_type hypre -pc_hypre_type euclid
591 
592    testset:
593       suffix: 19
594       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
595       args: -f0 ${DATAFILESPATH}/matrices/poisson1
596       args: -ksp_type cg -pc_type icc
597       args: -pc_factor_levels {{0 2 4}separate output}
598       test:
599       test:
600          args: -mat_type seqsbaij
601 
602    testset:
603       suffix: ILU
604       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
605       args: -f0 ${DATAFILESPATH}/matrices/small
606       args: -pc_factor_levels 1
607       test:
608       test:
609          # This is tested against regular ILU (used to be denoted ILUBAIJ)
610          args: -mat_type baij
611 
612    testset:
613       suffix: aijcusparse
614       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) cuda
615       args: -f0 ${DATAFILESPATH}/matrices/medium -ksp_monitor_short -ksp_view -mat_view ascii::ascii_info -mat_type aijcusparse -pc_factor_mat_solver_type cusparse -pc_type ilu -vec_type cuda
616 
617    testset:
618       TODO: No output file. Need to determine if deprecated
619       suffix: asm_viennacl
620       nsize: 2
621       requires: viennacl
622       args: -pc_type asm -pc_asm_sub_mat_type aijviennacl -f0 ${wPETSC_DIR}/share/petsc/datafiles/matrices/spd-real-int${PETSC_INDEX_SIZE}-float${PETSC_SCALAR_SIZE}
623 
624    testset:
625       nsize: 2
626       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) hypre !defined(PETSC_HAVE_HYPRE_DEVICE)
627       args: -f0 ${DATAFILESPATH}/matrices/poisson2.gz -ksp_monitor_short -ksp_rtol 1.E-9 -pc_type hypre -pc_hypre_type boomeramg
628       test:
629          suffix: boomeramg_euclid
630          args: -pc_hypre_boomeramg_smooth_type Euclid -pc_hypre_boomeramg_smooth_num_levels 2 -pc_hypre_boomeramg_eu_level 1 -pc_hypre_boomeramg_eu_droptolerance 0.01
631          TODO: Need to determine if deprecated
632       test:
633          suffix: boomeramg_euclid_bj
634          args: -pc_hypre_boomeramg_smooth_type Euclid -pc_hypre_boomeramg_smooth_num_levels 2 -pc_hypre_boomeramg_eu_level 1 -pc_hypre_boomeramg_eu_droptolerance 0.01 -pc_hypre_boomeramg_eu_bj
635          TODO: Need to determine if deprecated
636       test:
637          suffix: boomeramg_parasails
638          args: -pc_hypre_boomeramg_smooth_type ParaSails -pc_hypre_boomeramg_smooth_num_levels 2
639       test:
640          suffix: boomeramg_pilut
641          args: -pc_hypre_boomeramg_smooth_type Pilut -pc_hypre_boomeramg_smooth_num_levels 2
642       test:
643          suffix: boomeramg_schwarz
644          args: -pc_hypre_boomeramg_smooth_type Schwarz-smoothers
645 
646    testset:
647       suffix: cg_singlereduction
648       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
649       args: -f0 ${DATAFILESPATH}/matrices/small
650       args: -mat_type mpisbaij -ksp_type cg -pc_type eisenstat -ksp_monitor_short -ksp_converged_reason
651       test:
652       test:
653          args: -ksp_cg_single_reduction
654 
655    testset:
656       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
657       args: -f0 ${DATAFILESPATH}/matrices/poisson2.gz
658       args: -ksp_monitor_short -pc_type icc
659       test:
660          suffix: cr
661          args: -ksp_type cr
662       test:
663          suffix: lcd
664          args: -ksp_type lcd
665 
666    testset:
667       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
668       args: -f0 ${DATAFILESPATH}/matrices/small
669       args: -ksp_monitor_short -ksp_view -mat_view ascii::ascii_info
670       test:
671          suffix: seqaijcrl
672          args: -mat_type seqaijcrl
673       test:
674          suffix: seqaijperm
675          args: -mat_type seqaijperm
676 
677    testset:
678       nsize: 2
679       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
680       args: -f0 ${DATAFILESPATH}/matrices/small
681       args: -ksp_monitor_short -ksp_view
682       # Different output files
683       test:
684          suffix: mpiaijcrl
685          args: -mat_type mpiaijcrl
686       test:
687          suffix: mpiaijperm
688          args: -mat_type mpiaijperm
689 
690    testset:
691       nsize: 4
692       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) !defined(PETSC_HAVE_I_MPI_NUMVERSION)
693       args: -ksp_monitor_short -ksp_view
694       test:
695          suffix: xxt
696          args: -f0 ${DATAFILESPATH}/matrices/poisson1 -check_symmetry -ksp_type cg -pc_type tfs
697       test:
698          suffix: xyt
699          args: -f0 ${DATAFILESPATH}/matrices/medium -ksp_type gmres -pc_type tfs
700 
701    testset:
702       # The output file here is the same as mumps
703       suffix: mumps_cholesky
704       output_file: output/ex72_mumps.out
705       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) mumps
706       args: -f0 ${DATAFILESPATH}/matrices/small -ksp_type preonly -pc_type cholesky -pc_factor_mat_solver_type mumps -num_numfac 2 -num_rhs 2
707       nsize: {{1 2}}
708       test:
709          args: -mat_type sbaij -mat_ignore_lower_triangular
710       test:
711          args: -mat_type aij
712       test:
713          args: -mat_type aij -matload_spd
714 
715    testset:
716       # The output file here is the same as mumps
717       suffix: mumps_lu
718       output_file: output/ex72_mumps.out
719       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) mumps
720       args: -f0 ${DATAFILESPATH}/matrices/small -ksp_type preonly -pc_type lu -pc_factor_mat_solver_type mumps -num_numfac 2 -num_rhs 2
721       test:
722          args: -mat_type seqaij
723       test:
724          nsize: 2
725          args: -mat_type mpiaij
726       test:
727          args: -mat_type seqbaij -matload_block_size 2
728       test:
729          nsize: 2
730          args: -mat_type mpibaij -matload_block_size 2
731       test:
732          args: -mat_type aij -mat_mumps_icntl_7 5
733          TODO: Need to determine if deprecated
734 
735    test:
736       suffix: mumps_lu_parmetis
737       output_file: output/ex72_mumps.out
738       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) mumps parmetis
739       nsize: 2
740       args: -f0 ${DATAFILESPATH}/matrices/small -ksp_type preonly -pc_type lu -pc_factor_mat_solver_type mumps -num_numfac 2 -num_rhs 2 -mat_type mpiaij -mat_mumps_icntl_28 2 -mat_mumps_icntl_29 2
741 
742    test:
743       suffix: mumps_lu_ptscotch
744       output_file: output/ex72_mumps.out
745       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) mumps ptscotch
746       nsize: 2
747       args: -f0 ${DATAFILESPATH}/matrices/small -ksp_type preonly -pc_type lu -pc_factor_mat_solver_type mumps -num_numfac 2 -num_rhs 2 -mat_type mpiaij -mat_mumps_icntl_28 2 -mat_mumps_icntl_29 1
748 
749    testset:
750       # The output file here is the same as mumps
751       suffix: mumps_redundant
752       output_file: output/ex72_mumps_redundant.out
753       nsize: 8
754       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) mumps
755       args: -f0 ${DATAFILESPATH}/matrices/medium -ksp_type preonly -pc_type redundant -pc_redundant_number {{8 7 6 5 4 3 2 1}} -redundant_pc_factor_mat_solver_type mumps -num_numfac 2 -num_rhs 2
756 
757    testset:
758       suffix: pastix_cholesky
759       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) pastix
760       output_file: output/ex72_mumps.out
761       nsize: {{1 2}}
762       args: -f0 ${DATAFILESPATH}/matrices/small -ksp_type preonly -pc_factor_mat_solver_type pastix -num_numfac 2 -num_rhs 2 -pc_type cholesky -mat_type sbaij -mat_ignore_lower_triangular
763 
764    testset:
765       suffix: pastix_lu
766       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) pastix
767       args: -f0 ${DATAFILESPATH}/matrices/small -ksp_type preonly -pc_type lu -pc_factor_mat_solver_type pastix -num_numfac 2 -num_rhs 2
768       output_file: output/ex72_mumps.out
769       test:
770          args: -mat_type seqaij
771       test:
772          nsize: 2
773          args: -mat_type mpiaij
774 
775    testset:
776       suffix: pastix_redundant
777       output_file: output/ex72_mumps_redundant.out
778       nsize: 8
779       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) pastix
780       args: -f0 ${DATAFILESPATH}/matrices/medium -ksp_type preonly -pc_type redundant -pc_redundant_number {{8 7 6 5 4 3 2 1}} -redundant_pc_factor_mat_solver_type pastix -num_numfac 2 -num_rhs 2
781 
782    testset:
783       suffix: superlu_dist_lu
784       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) superlu_dist
785       output_file: output/ex72_mumps.out
786       args: -f0 ${DATAFILESPATH}/matrices/small -ksp_type preonly -pc_type lu -pc_factor_mat_solver_type superlu_dist -num_numfac 2 -num_rhs 2
787       nsize: {{1 2}}
788 
789    testset:
790       suffix: superlu_dist_redundant
791       nsize: 8
792       output_file: output/ex72_mumps_redundant.out
793       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) superlu_dist
794       args: -f0 ${DATAFILESPATH}/matrices/medium -ksp_type preonly -pc_type redundant -pc_redundant_number {{8 7 6 5 4 3 2 1}} -redundant_pc_factor_mat_solver_type superlu_dist -num_numfac 2 -num_rhs 2
795 
796    testset:
797       suffix: superlu_lu
798       output_file: output/ex72_mumps.out
799       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) superlu
800       args: -f0 ${DATAFILESPATH}/matrices/small -ksp_type preonly -pc_type lu -pc_factor_mat_solver_type superlu -num_numfac 2 -num_rhs 2
801 
802    testset:
803       suffix: umfpack
804       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) suitesparse
805       args: -f0 ${DATAFILESPATH}/matrices/small -ksp_type preonly -pc_type lu -mat_type seqaij -pc_factor_mat_solver_type umfpack -num_numfac 2 -num_rhs 2
806 
807    testset:
808       suffix: zeropivot
809       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) mumps
810       args: -f0 ${DATAFILESPATH}/matrices/small -test_zeropivot -ksp_converged_reason -ksp_type fgmres -pc_type ksp
811       test:
812          nsize: 3
813          args: -ksp_pc_type bjacobi
814       test:
815          nsize: 2
816          args: -ksp_ksp_type cg -ksp_pc_type bjacobi -ksp_pc_bjacobi_blocks 1
817       #test:
818          #nsize: 3
819          #args: -ksp_ksp_converged_reason -ksp_pc_type bjacobi -ksp_sub_ksp_converged_reason
820          #TODO: Need to determine if deprecated
821 
822    testset:
823       requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
824       args: -mat_convert_type is -f0 ${DATAFILESPATH}/matrices/medium -ksp_type fgmres
825       test:
826          suffix: aij_gdsw
827          nsize: 4
828          args: -mat_convert_type aij -pc_type mg -pc_mg_levels 2 -pc_mg_adapt_interp_coarse_space gdsw -pc_mg_galerkin -mg_levels_pc_type asm
829       test:
830          output_file: output/ex72_aij_gdsw.out
831          suffix: is_gdsw
832          nsize: 4
         args: -pc_type mg -pc_mg_levels 2 -pc_mg_adapt_interp_coarse_space gdsw -pc_mg_galerkin -mg_levels_pc_type asm
834       test:
835          suffix: is_asm
836          nsize: {{1 2}separate output}
837          args: -pc_type asm
838       test:
839          suffix: bddc_seq
840          nsize: 1
841          args: -pc_type bddc
842       test:
843          suffix: bddc_par
844          nsize: 2
845          args: -pc_type bddc
846       test:
847          requires: parmetis
848          suffix: bddc_par_nd_parmetis
849          filter: sed -e "s/Number of iterations =   [0-9]/Number of iterations = 9/g"
850          nsize: 4
851          args: -ksp_error_if_not_converged -pc_type bddc -mat_is_disassemble_l2g_type nd -mat_partitioning_type parmetis
852       test:
853          requires: ptscotch defined(PETSC_HAVE_SCOTCH_PARMETIS_V3_NODEND)
854          suffix: bddc_par_nd_ptscotch
855          filter: sed -e "s/Number of iterations =   [0-9]/Number of iterations = 9/g"
856          nsize: 4
857          args: -ksp_error_if_not_converged -pc_type bddc -mat_is_disassemble_l2g_type nd -mat_partitioning_type ptscotch
858 
859    testset:
860       requires: !__float128 hpddm slepc defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
861       test:
862          suffix: hpddm_mat
863          output_file: output/ex72_bddc_seq.out
864          filter: sed -e "s/Number of iterations =   2/Number of iterations =   1/g"
865          nsize: 2
866          args: -f0 ${wPETSC_DIR}/share/petsc/datafiles/matrices/spd-real-int@PETSC_INDEX_SIZE@-float@PETSC_SCALAR_SIZE@ -pc_type hpddm -pc_hpddm_define_subdomains -pc_hpddm_levels_1_sub_pc_type cholesky -pc_hpddm_levels_1_eps_nev 5 -pc_hpddm_levels_1_st_pc_type mat
867       test:
868          requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES)
869          suffix: hpddm_gen_non_hermitian
870          output_file: output/ex72_2.out
871          nsize: 4
872          args: -f0 ${DATAFILESPATH}/matrices/arco1 -pc_type hpddm -pc_hpddm_define_subdomains -pc_hpddm_levels_1_sub_pc_type lu -pc_hpddm_levels_1_eps_nev 10 -pc_hpddm_levels_1_st_share_sub_ksp -pc_hpddm_levels_1_eps_gen_non_hermitian -pc_hpddm_coarse_mat_type baij -pc_hpddm_block_splitting -pc_hpddm_levels_1_eps_threshold 0.7 -pc_hpddm_coarse_pc_type lu -ksp_pc_side right
873       test:
874          requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) mumps !defined(PETSCTEST_VALGRIND)
875          suffix: hpddm_gen_non_hermitian_baij
876          output_file: output/ex72_5.out
877          nsize: 4
878          timeoutfactor: 2
879          args: -f0 ${DATAFILESPATH}/matrices/arco6 -pc_type hpddm -pc_hpddm_define_subdomains -pc_hpddm_levels_1_sub_pc_type lu -pc_hpddm_levels_1_eps_nev 30 -pc_hpddm_levels_1_st_share_sub_ksp -pc_hpddm_levels_1_eps_gen_non_hermitian -pc_hpddm_coarse_mat_type baij -pc_hpddm_block_splitting -pc_hpddm_levels_1_eps_threshold 0.8 -pc_hpddm_coarse_pc_type lu -ksp_pc_side right -mat_type baij -pc_hpddm_levels_1_sub_pc_factor_mat_solver_type mumps -pc_hpddm_levels_1_eps_tol 1.0e-2
880 TEST*/
881