xref: /petsc/src/ksp/pc/impls/gamg/gamg.c (revision cf8ae1d3cf0eec4bff4f7d4eff91937e3edf3150)
1 /*
2  GAMG geometric-algebraic multigrid PC - Mark Adams 2011
3  */
4 #include <petsc/private/matimpl.h>
5 #include <../src/ksp/pc/impls/gamg/gamg.h>           /*I "petscpc.h" I*/
6 #include <../src/ksp/pc/impls/bjacobi/bjacobi.h> /* Hack to access same_local_solves */
7 
8 #if defined PETSC_GAMG_USE_LOG
9 PetscLogEvent petsc_gamg_setup_events[NUM_SET];
10 #endif
11 
12 #if defined PETSC_USE_LOG
13 PetscLogEvent PC_GAMGGraph_AGG;
14 PetscLogEvent PC_GAMGGraph_GEO;
15 PetscLogEvent PC_GAMGCoarsen_AGG;
16 PetscLogEvent PC_GAMGCoarsen_GEO;
17 PetscLogEvent PC_GAMGProlongator_AGG;
18 PetscLogEvent PC_GAMGProlongator_GEO;
19 PetscLogEvent PC_GAMGOptProlongator_AGG;
20 #endif
21 
22 #define GAMG_MAXLEVELS 30
23 
24 /* #define GAMG_STAGES */
25 #if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES)
26 static PetscLogStage gamg_stages[GAMG_MAXLEVELS];
27 #endif
28 
29 static PetscFunctionList GAMGList = 0;
30 static PetscBool PCGAMGPackageInitialized;
31 
32 /* ----------------------------------------------------------------------------- */
33 #undef __FUNCT__
34 #define __FUNCT__ "PCReset_GAMG"
35 PetscErrorCode PCReset_GAMG(PC pc)
36 {
37   PetscErrorCode ierr;
38   PC_MG          *mg      = (PC_MG*)pc->data;
39   PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
40 
41   PetscFunctionBegin;
42   if (pc_gamg->data) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_PLIB,"This should not happen, cleaned up in SetUp\n");
43   pc_gamg->data_sz = 0;
44   ierr = PetscFree(pc_gamg->orig_data);CHKERRQ(ierr);
45   PetscFunctionReturn(0);
46 }
47 
48 /* -------------------------------------------------------------------------- */
49 /*
50    PCGAMGCreateLevel_GAMG: create coarse op with RAP.  repartition and/or reduce number
51      of active processors.
52 
53    Input Parameter:
54    . pc - parameters + side effect: coarse data in 'pc_gamg->data' and
55           'pc_gamg->data_sz' are changed via repartitioning/reduction.
56    . Amat_fine - matrix on this fine (k) level
57    . cr_bs - coarse block size
58    In/Output Parameter:
59    . a_P_inout - prolongation operator to the next level (k-->k-1)
60    . a_nactive_proc - number of active procs
61    Output Parameter:
62    . a_Amat_crs - coarse matrix that is created (k-1)
63 */
64 
#undef __FUNCT__
#define __FUNCT__ "PCGAMGCreateLevel_GAMG"
/* Form the Galerkin coarse operator Cmat = P^T A P, then (optionally) repartition
   it or reduce the number of active processes, moving the columns of P and the
   auxiliary node data (pc_gamg->data) to match the new layout. */
static PetscErrorCode PCGAMGCreateLevel_GAMG(PC pc,Mat Amat_fine,PetscInt cr_bs,Mat *a_P_inout,Mat *a_Amat_crs,PetscMPIInt *a_nactive_proc,IS * Pcolumnperm, PetscBool is_last)
{
  PetscErrorCode  ierr;
  PC_MG           *mg         = (PC_MG*)pc->data;
  PC_GAMG         *pc_gamg    = (PC_GAMG*)mg->innerctx;
  Mat             Cmat,Pold=*a_P_inout;
  MPI_Comm        comm;
  PetscMPIInt     rank,size,new_size,nactive=*a_nactive_proc;
  PetscInt        ncrs_eq,ncrs,f_bs;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)Amat_fine,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MatGetBlockSize(Amat_fine, &f_bs);CHKERRQ(ierr);
  /* Galerkin triple product; 2.0 is the fill estimate passed to MatPtAP */
  ierr = MatPtAP(Amat_fine, Pold, MAT_INITIAL_MATRIX, 2.0, &Cmat);CHKERRQ(ierr);

  /* set 'ncrs' (nodes), 'ncrs_eq' (equations)*/
  ierr = MatGetLocalSize(Cmat, &ncrs_eq, NULL);CHKERRQ(ierr);
  if (pc_gamg->data_cell_rows>0) {
    /* derive node count from the auxiliary data layout when it is present */
    ncrs = pc_gamg->data_sz/pc_gamg->data_cell_cols/pc_gamg->data_cell_rows;
  } else {
    PetscInt  bs;
    ierr = MatGetBlockSize(Cmat, &bs);CHKERRQ(ierr);
    ncrs = ncrs_eq/bs;
  }

  /* get number of PEs to make active 'new_size', reduce, can be any integer 1-P */
  if (is_last && !pc_gamg->use_parallel_coarse_grid_solver) new_size = 1;
  else {
    PetscInt ncrs_eq_glob;
    ierr     = MatGetSize(Cmat, &ncrs_eq_glob, NULL);CHKERRQ(ierr);
    new_size = (PetscMPIInt)((float)ncrs_eq_glob/(float)pc_gamg->min_eq_proc + 0.5); /* hardwire min. number of eq/proc */
    if (!new_size) new_size = 1; /* not likely, possible? */
    else if (new_size >= nactive) new_size = nactive; /* no change, rare */
  }

  if (Pcolumnperm) *Pcolumnperm = NULL;

  if (!pc_gamg->repart && new_size==nactive) *a_Amat_crs = Cmat; /* output - no repartitioning or reduction - could bail here */
  else {
    PetscInt       *counts,*newproc_idx,ii,jj,kk,strideNew,*tidx,ncrs_new,ncrs_eq_new,nloc_old;
    IS             is_eq_newproc,is_eq_num,is_eq_num_prim,new_eq_indices;

    nloc_old = ncrs_eq/cr_bs;
    if (ncrs_eq % cr_bs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"ncrs_eq %D not divisible by cr_bs %D",ncrs_eq,cr_bs);
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET12],0,0,0,0);CHKERRQ(ierr);
#endif
    /* make 'is_eq_newproc': for each local equation, the rank it will live on */
    ierr = PetscMalloc1(size, &counts);CHKERRQ(ierr);
    if (pc_gamg->repart) {
      /* Repartition Cmat_{k} and move columns of P^{k}_{k-1} and coordinates of primal part accordingly */
      Mat adj;

      ierr = PetscInfo3(pc,"Repartition: size (active): %D --> %D, %D local equations\n",*a_nactive_proc,new_size,ncrs_eq);CHKERRQ(ierr);

      /* get 'adj' */
      if (cr_bs == 1) {
        ierr = MatConvert(Cmat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj);CHKERRQ(ierr);
      } else {
        /* make a scalar matrix to partition (no Stokes here) */
        Mat               tMat;
        PetscInt          Istart_crs,Iend_crs,ncols,jj,Ii;
        const PetscScalar *vals;
        const PetscInt    *idx;
        PetscInt          *d_nnz, *o_nnz, M, N;
        static PetscInt   llev = 0;
        MatType           mtype;

        /* preallocation: sample one row per block; clamp to valid diag/off-diag widths */
        ierr = PetscMalloc2(ncrs, &d_nnz,ncrs, &o_nnz);CHKERRQ(ierr);
        ierr = MatGetOwnershipRange(Cmat, &Istart_crs, &Iend_crs);CHKERRQ(ierr);
        ierr = MatGetSize(Cmat, &M, &N);CHKERRQ(ierr);
        for (Ii = Istart_crs, jj = 0; Ii < Iend_crs; Ii += cr_bs, jj++) {
          ierr      = MatGetRow(Cmat,Ii,&ncols,0,0);CHKERRQ(ierr);
          d_nnz[jj] = ncols/cr_bs;
          o_nnz[jj] = ncols/cr_bs;
          ierr      = MatRestoreRow(Cmat,Ii,&ncols,0,0);CHKERRQ(ierr);
          if (d_nnz[jj] > ncrs) d_nnz[jj] = ncrs;
          if (o_nnz[jj] > (M/cr_bs-ncrs)) o_nnz[jj] = M/cr_bs-ncrs;
        }

        ierr = MatGetType(Amat_fine,&mtype);CHKERRQ(ierr);
        ierr = MatCreate(comm, &tMat);CHKERRQ(ierr);
        ierr = MatSetSizes(tMat, ncrs, ncrs,PETSC_DETERMINE, PETSC_DETERMINE);CHKERRQ(ierr);
        ierr = MatSetType(tMat,mtype);CHKERRQ(ierr);
        ierr = MatSeqAIJSetPreallocation(tMat,0,d_nnz);CHKERRQ(ierr);
        ierr = MatMPIAIJSetPreallocation(tMat,0,d_nnz,0,o_nnz);CHKERRQ(ierr);
        ierr = PetscFree2(d_nnz,o_nnz);CHKERRQ(ierr);

        /* collapse each cr_bs x cr_bs block of Cmat to a single scalar entry */
        for (ii = Istart_crs; ii < Iend_crs; ii++) {
          PetscInt dest_row = ii/cr_bs;
          ierr = MatGetRow(Cmat,ii,&ncols,&idx,&vals);CHKERRQ(ierr);
          for (jj = 0; jj < ncols; jj++) {
            PetscInt    dest_col = idx[jj]/cr_bs;
            PetscScalar v        = 1.0;
            ierr = MatSetValues(tMat,1,&dest_row,1,&dest_col,&v,ADD_VALUES);CHKERRQ(ierr);
          }
          ierr = MatRestoreRow(Cmat,ii,&ncols,&idx,&vals);CHKERRQ(ierr);
        }
        ierr = MatAssemblyBegin(tMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
        ierr = MatAssemblyEnd(tMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

        if (llev++ == -1) { /* dead debug branch: condition is never true */
          PetscViewer viewer; char fname[32];
          ierr = PetscSNPrintf(fname,sizeof(fname),"part_mat_%D.mat",llev);CHKERRQ(ierr);
          /* NOTE(review): return value of PetscViewerBinaryOpen() is unchecked (no ierr/CHKERRQ) — harmless while this branch is dead, but should be checked if ever enabled */
          PetscViewerBinaryOpen(comm,fname,FILE_MODE_WRITE,&viewer);
          ierr = MatView(tMat, viewer);CHKERRQ(ierr);
          ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
        }
        ierr = MatConvert(tMat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj);CHKERRQ(ierr);
        ierr = MatDestroy(&tMat);CHKERRQ(ierr);
      } /* create 'adj' */

      { /* partition: get newproc_idx */
        char            prefix[256];
        const char      *pcpre;
        const PetscInt  *is_idx;
        MatPartitioning mpart;
        IS              proc_is;
        PetscInt        targetPE;

        ierr = MatPartitioningCreate(comm, &mpart);CHKERRQ(ierr);
        ierr = MatPartitioningSetAdjacency(mpart, adj);CHKERRQ(ierr);
        ierr = PCGetOptionsPrefix(pc, &pcpre);CHKERRQ(ierr);
        ierr = PetscSNPrintf(prefix,sizeof(prefix),"%spc_gamg_",pcpre ? pcpre : "");CHKERRQ(ierr);
        ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
        ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr);
        ierr = MatPartitioningSetNParts(mpart, new_size);CHKERRQ(ierr);
        ierr = MatPartitioningApply(mpart, &proc_is);CHKERRQ(ierr);
        ierr = MatPartitioningDestroy(&mpart);CHKERRQ(ierr);

        /* collect IS info: expand node partition to equation space by cr_bs */
        ierr     = PetscMalloc1(ncrs_eq, &newproc_idx);CHKERRQ(ierr);
        ierr     = ISGetIndices(proc_is, &is_idx);CHKERRQ(ierr);
        targetPE = 1; /* bring to "front" of machine */
        /*targetPE = size/new_size;*/ /* spread partitioning across machine */
        for (kk = jj = 0 ; kk < nloc_old ; kk++) {
          for (ii = 0 ; ii < cr_bs ; ii++, jj++) {
            newproc_idx[jj] = is_idx[kk] * targetPE; /* distribution */
          }
        }
        ierr = ISRestoreIndices(proc_is, &is_idx);CHKERRQ(ierr);
        ierr = ISDestroy(&proc_is);CHKERRQ(ierr);
      }
      ierr = MatDestroy(&adj);CHKERRQ(ierr);

      ierr = ISCreateGeneral(comm, ncrs_eq, newproc_idx, PETSC_COPY_VALUES, &is_eq_newproc);CHKERRQ(ierr);
      ierr = PetscFree(newproc_idx);CHKERRQ(ierr);
    } else { /* simple aggregation of parts -- 'is_eq_newproc' */
      PetscInt rfactor,targetPE;

      /* find factor: pick a divisor of 'size' whose quotient best matches new_size */
      if (new_size == 1) rfactor = size; /* easy */
      else {
        PetscReal best_fact = 0.;
        jj = -1;
        for (kk = 1 ; kk <= size ; kk++) {
          if (!(size%kk)) { /* a candidate */
            PetscReal nactpe = (PetscReal)size/(PetscReal)kk, fact = nactpe/(PetscReal)new_size;
            if (fact > 1.0) fact = 1./fact; /* keep fact < 1 */
            if (fact > best_fact) {
              best_fact = fact; jj = kk;
            }
          }
        }
        if (jj != -1) rfactor = jj;
        else rfactor = 1; /* does this happen .. a prime */
      }
      new_size = size/rfactor;

      if (new_size==nactive) {
        *a_Amat_crs = Cmat; /* output - no repartitioning or reduction, bail out because nested here */
        ierr        = PetscFree(counts);CHKERRQ(ierr);
        ierr = PetscInfo2(pc,"Aggregate processors noop: new_size=%D, neq(loc)=%D\n",new_size,ncrs_eq);CHKERRQ(ierr);
#if defined PETSC_GAMG_USE_LOG
        ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET12],0,0,0,0);CHKERRQ(ierr);
#endif
        PetscFunctionReturn(0);
      }

      ierr = PetscInfo1(pc,"Number of equations (loc) %D with simple aggregation\n",ncrs_eq);CHKERRQ(ierr);
      targetPE = rank/rfactor; /* groups of 'rfactor' consecutive ranks fold onto one */
      ierr     = ISCreateStride(comm, ncrs_eq, targetPE, 0, &is_eq_newproc);CHKERRQ(ierr);
    } /* end simple 'is_eq_newproc' */

    /*
     Create an index set from the is_eq_newproc index set to indicate the mapping TO
     */
    ierr = ISPartitioningToNumbering(is_eq_newproc, &is_eq_num);CHKERRQ(ierr);
    is_eq_num_prim = is_eq_num;
    /*
      Determine how many equations/vertices are assigned to each processor
     */
    ierr        = ISPartitioningCount(is_eq_newproc, size, counts);CHKERRQ(ierr);
    ncrs_eq_new = counts[rank];
    ierr        = ISDestroy(&is_eq_newproc);CHKERRQ(ierr);
    ncrs_new = ncrs_eq_new/cr_bs; /* eqs */

    ierr = PetscFree(counts);CHKERRQ(ierr);
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET12],0,0,0,0);CHKERRQ(ierr);
#endif
    /* data movement scope -- this could be moved to subclasses so that we don't try to cram all auxiliary data into some complex abstracted thing */
    {
    Vec            src_crd, dest_crd;
    const PetscInt *idx,ndata_rows=pc_gamg->data_cell_rows,ndata_cols=pc_gamg->data_cell_cols,node_data_sz=ndata_rows*ndata_cols;
    VecScatter     vecscat;
    PetscScalar    *array;
    IS isscat;

    /* move data (for primal equations only) */
    /* Create a vector to contain the newly ordered element information */
    ierr = VecCreate(comm, &dest_crd);CHKERRQ(ierr);
    ierr = VecSetSizes(dest_crd, node_data_sz*ncrs_new, PETSC_DECIDE);CHKERRQ(ierr);
    ierr = VecSetType(dest_crd,VECSTANDARD);CHKERRQ(ierr); /* this is needed! */
    /*
     There are 'ndata_rows*ndata_cols' data items per node, (one can think of the vectors of having
     a block size of ...).  Note, ISs are expanded into equation space by 'cr_bs'.
     */
    ierr = PetscMalloc1(ncrs*node_data_sz, &tidx);CHKERRQ(ierr);
    ierr = ISGetIndices(is_eq_num_prim, &idx);CHKERRQ(ierr);
    for (ii=0,jj=0; ii<ncrs; ii++) {
      PetscInt id = idx[ii*cr_bs]/cr_bs; /* get node back */
      for (kk=0; kk<node_data_sz; kk++, jj++) tidx[jj] = id*node_data_sz + kk;
    }
    ierr = ISRestoreIndices(is_eq_num_prim, &idx);CHKERRQ(ierr);
    ierr = ISCreateGeneral(comm, node_data_sz*ncrs, tidx, PETSC_COPY_VALUES, &isscat);CHKERRQ(ierr);
    ierr = PetscFree(tidx);CHKERRQ(ierr);
    /*
     Create a vector to contain the original vertex information for each element
     */
    ierr = VecCreateSeq(PETSC_COMM_SELF, node_data_sz*ncrs, &src_crd);CHKERRQ(ierr);
    for (jj=0; jj<ndata_cols; jj++) {
      const PetscInt stride0=ncrs*pc_gamg->data_cell_rows;
      for (ii=0; ii<ncrs; ii++) {
        for (kk=0; kk<ndata_rows; kk++) {
          /* transpose from column-major storage ('ix') to per-node packing ('jx') */
          PetscInt    ix = ii*ndata_rows + kk + jj*stride0, jx = ii*node_data_sz + kk*ndata_cols + jj;
          PetscScalar tt = (PetscScalar)pc_gamg->data[ix];
          ierr = VecSetValues(src_crd, 1, &jx, &tt, INSERT_VALUES);CHKERRQ(ierr);
        }
      }
    }
    ierr = VecAssemblyBegin(src_crd);CHKERRQ(ierr);
    ierr = VecAssemblyEnd(src_crd);CHKERRQ(ierr);
    /*
      Scatter the element vertex information (still in the original vertex ordering)
      to the correct processor
    */
    ierr = VecScatterCreate(src_crd, NULL, dest_crd, isscat, &vecscat);CHKERRQ(ierr);
    ierr = ISDestroy(&isscat);CHKERRQ(ierr);
    ierr = VecScatterBegin(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&vecscat);CHKERRQ(ierr);
    ierr = VecDestroy(&src_crd);CHKERRQ(ierr);
    /*
      Put the element vertex data into a new allocation of the gdata->ele
    */
    ierr = PetscFree(pc_gamg->data);CHKERRQ(ierr);
    ierr = PetscMalloc1(node_data_sz*ncrs_new, &pc_gamg->data);CHKERRQ(ierr);

    pc_gamg->data_sz = node_data_sz*ncrs_new;
    strideNew        = ncrs_new*ndata_rows;

    /* unpack back to column-major layout in the new distribution */
    ierr = VecGetArray(dest_crd, &array);CHKERRQ(ierr);
    for (jj=0; jj<ndata_cols; jj++) {
      for (ii=0; ii<ncrs_new; ii++) {
        for (kk=0; kk<ndata_rows; kk++) {
          PetscInt ix = ii*ndata_rows + kk + jj*strideNew, jx = ii*node_data_sz + kk*ndata_cols + jj;
          pc_gamg->data[ix] = PetscRealPart(array[jx]);
        }
      }
    }
    ierr = VecRestoreArray(dest_crd, &array);CHKERRQ(ierr);
    ierr = VecDestroy(&dest_crd);CHKERRQ(ierr);
    }
    /* move A and P (columns) with new layout */
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET13],0,0,0,0);CHKERRQ(ierr);
#endif

    /*
      Invert for MatGetSubMatrix
    */
    ierr = ISInvertPermutation(is_eq_num, ncrs_eq_new, &new_eq_indices);CHKERRQ(ierr);
    ierr = ISSort(new_eq_indices);CHKERRQ(ierr); /* is this needed? */
    ierr = ISSetBlockSize(new_eq_indices, cr_bs);CHKERRQ(ierr);
    if (is_eq_num != is_eq_num_prim) {
      ierr = ISDestroy(&is_eq_num_prim);CHKERRQ(ierr); /* could be same as 'is_eq_num' */
    }
    if (Pcolumnperm) {
      /* caller takes a reference; it must destroy it */
      ierr = PetscObjectReference((PetscObject)new_eq_indices);CHKERRQ(ierr);
      *Pcolumnperm = new_eq_indices;
    }
    ierr = ISDestroy(&is_eq_num);CHKERRQ(ierr);
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET13],0,0,0,0);CHKERRQ(ierr);
    ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET14],0,0,0,0);CHKERRQ(ierr);
#endif
    /* 'a_Amat_crs' output */
    {
      Mat mat;
      ierr        = MatGetSubMatrix(Cmat, new_eq_indices, new_eq_indices, MAT_INITIAL_MATRIX, &mat);CHKERRQ(ierr);
      *a_Amat_crs = mat;
    }
    ierr = MatDestroy(&Cmat);CHKERRQ(ierr);

#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET14],0,0,0,0);CHKERRQ(ierr);
#endif
    /* prolongator: keep the fine rows, permute the columns to the new layout */
    {
      IS       findices;
      PetscInt Istart,Iend;
      Mat      Pnew;

      ierr = MatGetOwnershipRange(Pold, &Istart, &Iend);CHKERRQ(ierr);
#if defined PETSC_GAMG_USE_LOG
      ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET15],0,0,0,0);CHKERRQ(ierr);
#endif
      ierr = ISCreateStride(comm,Iend-Istart,Istart,1,&findices);CHKERRQ(ierr);
      ierr = ISSetBlockSize(findices,f_bs);CHKERRQ(ierr);
      ierr = MatGetSubMatrix(Pold, findices, new_eq_indices, MAT_INITIAL_MATRIX, &Pnew);CHKERRQ(ierr);
      ierr = ISDestroy(&findices);CHKERRQ(ierr);

#if defined PETSC_GAMG_USE_LOG
      ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET15],0,0,0,0);CHKERRQ(ierr);
#endif
      ierr = MatDestroy(a_P_inout);CHKERRQ(ierr);

      /* output - repartitioned */
      *a_P_inout = Pnew;
    }
    ierr = ISDestroy(&new_eq_indices);CHKERRQ(ierr);

    *a_nactive_proc = new_size; /* output */
  }
  PetscFunctionReturn(0);
}
406 
407 /* -------------------------------------------------------------------------- */
408 /*
409    PCSetUp_GAMG - Prepares for the use of the GAMG preconditioner
410                     by setting data structures and options.
411 
412    Input Parameter:
413 .  pc - the preconditioner context
414 
415 */
416 #undef __FUNCT__
417 #define __FUNCT__ "PCSetUp_GAMG"
418 PetscErrorCode PCSetUp_GAMG(PC pc)
419 {
420   PetscErrorCode ierr;
421   PC_MG          *mg      = (PC_MG*)pc->data;
422   PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
423   Mat            Pmat     = pc->pmat;
424   PetscInt       fine_level,level,level1,bs,M,N,qq,lidx,nASMBlocksArr[GAMG_MAXLEVELS];
425   MPI_Comm       comm;
426   PetscMPIInt    rank,size,nactivepe;
427   Mat            Aarr[GAMG_MAXLEVELS],Parr[GAMG_MAXLEVELS];
428   IS             *ASMLocalIDsArr[GAMG_MAXLEVELS];
429   PetscLogDouble nnz0=0.,nnztot=0.;
430   MatInfo        info;
431   PetscBool      is_last = PETSC_FALSE;
432 
433   PetscFunctionBegin;
434   ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
435   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
436   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
437 
438   if (pc_gamg->setup_count++ > 0) {
439     if ((PetscBool)(!pc_gamg->reuse_prol)) {
440       /* reset everything */
441       ierr = PCReset_MG(pc);CHKERRQ(ierr);
442       pc->setupcalled = 0;
443     } else {
444       PC_MG_Levels **mglevels = mg->levels;
445       /* just do Galerkin grids */
446       Mat          B,dA,dB;
447 
448      if (!pc->setupcalled) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"PCSetUp() has not been called yet");
449       if (pc_gamg->Nlevels > 1) {
450         /* currently only handle case where mat and pmat are the same on coarser levels */
451         ierr = KSPGetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,&dA,&dB);CHKERRQ(ierr);
452         /* (re)set to get dirty flag */
453         ierr = KSPSetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,dA,dB);CHKERRQ(ierr);
454 
455         for (level=pc_gamg->Nlevels-2; level>=0; level--) {
456           /* the first time through the matrix structure has changed from repartitioning */
457           if (pc_gamg->setup_count==2) {
458             ierr = MatPtAP(dB,mglevels[level+1]->interpolate,MAT_INITIAL_MATRIX,1.0,&B);CHKERRQ(ierr);
459             ierr = MatDestroy(&mglevels[level]->A);CHKERRQ(ierr);
460 
461             mglevels[level]->A = B;
462           } else {
463             ierr = KSPGetOperators(mglevels[level]->smoothd,NULL,&B);CHKERRQ(ierr);
464             ierr = MatPtAP(dB,mglevels[level+1]->interpolate,MAT_REUSE_MATRIX,1.0,&B);CHKERRQ(ierr);
465           }
466           ierr = KSPSetOperators(mglevels[level]->smoothd,B,B);CHKERRQ(ierr);
467           dB   = B;
468         }
469       }
470 
471       ierr = PCSetUp_MG(pc);CHKERRQ(ierr);
472       PetscFunctionReturn(0);
473     }
474   }
475 
476   if (!pc_gamg->data) {
477     if (pc_gamg->orig_data) {
478       ierr = MatGetBlockSize(Pmat, &bs);CHKERRQ(ierr);
479       ierr = MatGetLocalSize(Pmat, &qq, NULL);CHKERRQ(ierr);
480 
481       pc_gamg->data_sz        = (qq/bs)*pc_gamg->orig_data_cell_rows*pc_gamg->orig_data_cell_cols;
482       pc_gamg->data_cell_rows = pc_gamg->orig_data_cell_rows;
483       pc_gamg->data_cell_cols = pc_gamg->orig_data_cell_cols;
484 
485       ierr = PetscMalloc1(pc_gamg->data_sz, &pc_gamg->data);CHKERRQ(ierr);
486       for (qq=0; qq<pc_gamg->data_sz; qq++) pc_gamg->data[qq] = pc_gamg->orig_data[qq];
487     } else {
488       if (!pc_gamg->ops->createdefaultdata) SETERRQ(comm,PETSC_ERR_PLIB,"'createdefaultdata' not set(?) need to support NULL data");
489       ierr = pc_gamg->ops->createdefaultdata(pc,Pmat);CHKERRQ(ierr);
490     }
491   }
492 
493   /* cache original data for reuse */
494   if (!pc_gamg->orig_data && (PetscBool)(!pc_gamg->reuse_prol)) {
495     ierr = PetscMalloc1(pc_gamg->data_sz, &pc_gamg->orig_data);CHKERRQ(ierr);
496     for (qq=0; qq<pc_gamg->data_sz; qq++) pc_gamg->orig_data[qq] = pc_gamg->data[qq];
497     pc_gamg->orig_data_cell_rows = pc_gamg->data_cell_rows;
498     pc_gamg->orig_data_cell_cols = pc_gamg->data_cell_cols;
499   }
500 
501   /* get basic dims */
502   ierr = MatGetBlockSize(Pmat, &bs);CHKERRQ(ierr);
503   ierr = MatGetSize(Pmat, &M, &N);CHKERRQ(ierr);
504 
505   ierr = MatGetInfo(Pmat,MAT_GLOBAL_SUM,&info);CHKERRQ(ierr); /* global reduction */
506   nnz0   = info.nz_used;
507   nnztot = info.nz_used;
508   ierr = PetscInfo6(pc,"level %d) N=%D, n data rows=%d, n data cols=%d, nnz/row (ave)=%d, np=%d\n",0,M,pc_gamg->data_cell_rows,pc_gamg->data_cell_cols,(int)(nnz0/(PetscReal)M+0.5),size);CHKERRQ(ierr);
509 
510   /* Get A_i and R_i */
511   for (level=0, Aarr[0]=Pmat, nactivepe = size; level < (pc_gamg->Nlevels-1) && (!level || M>pc_gamg->coarse_eq_limit); level++) {
512     pc_gamg->current_level = level;
513     level1 = level + 1;
514 #if defined PETSC_GAMG_USE_LOG
515     ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET1],0,0,0,0);CHKERRQ(ierr);
516 #if (defined GAMG_STAGES)
517     ierr = PetscLogStagePush(gamg_stages[level]);CHKERRQ(ierr);
518 #endif
519 #endif
520     { /* construct prolongator */
521       Mat              Gmat;
522       PetscCoarsenData *agg_lists;
523       Mat              Prol11;
524 
525       ierr = pc_gamg->ops->graph(pc,Aarr[level], &Gmat);CHKERRQ(ierr);
526       ierr = pc_gamg->ops->coarsen(pc, &Gmat, &agg_lists);CHKERRQ(ierr);
527       ierr = pc_gamg->ops->prolongator(pc,Aarr[level],Gmat,agg_lists,&Prol11);CHKERRQ(ierr);
528 
529       /* could have failed to create new level */
530       if (Prol11) {
531         /* get new block size of coarse matrices */
532         ierr = MatGetBlockSizes(Prol11, NULL, &bs);CHKERRQ(ierr);
533 
534         if (pc_gamg->ops->optprolongator) {
535           /* smooth */
536           ierr = pc_gamg->ops->optprolongator(pc, Aarr[level], &Prol11);CHKERRQ(ierr);
537         }
538 
539         Parr[level1] = Prol11;
540       } else Parr[level1] = NULL; /* failed to coarsen */
541 
542       if (pc_gamg->use_aggs_in_asm) {
543         PetscInt bs;
544         ierr = MatGetBlockSizes(Prol11, &bs, NULL);CHKERRQ(ierr);
545         ierr = PetscCDGetASMBlocks(agg_lists, bs, Gmat, &nASMBlocksArr[level], &ASMLocalIDsArr[level]);CHKERRQ(ierr);
546       }
547 
548       ierr = MatDestroy(&Gmat);CHKERRQ(ierr);
549       ierr = PetscCDDestroy(agg_lists);CHKERRQ(ierr);
550     } /* construct prolongator scope */
551 #if defined PETSC_GAMG_USE_LOG
552     ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET1],0,0,0,0);CHKERRQ(ierr);
553 #endif
554     if (!level) Aarr[0] = Pmat; /* use Pmat for finest level setup */
555     if (!Parr[level1]) { /* failed to coarsen */
556       ierr =  PetscInfo1(pc,"Stop gridding, level %D\n",level);CHKERRQ(ierr);
557 #if defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES
558       ierr = PetscLogStagePop();CHKERRQ(ierr);
559 #endif
560       break;
561     }
562 #if defined PETSC_GAMG_USE_LOG
563     ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET2],0,0,0,0);CHKERRQ(ierr);
564 #endif
565     ierr = MatGetSize(Parr[level1], &M, &N);CHKERRQ(ierr); /* N is next M, a loop test variables */
566     if (is_last) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Is last ????????");
567     if (N <= pc_gamg->coarse_eq_limit) is_last = PETSC_TRUE;
568     ierr = pc_gamg->ops->createlevel(pc, Aarr[level], bs, &Parr[level1], &Aarr[level1], &nactivepe, NULL, is_last);CHKERRQ(ierr);
569 
570 #if defined PETSC_GAMG_USE_LOG
571     ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET2],0,0,0,0);CHKERRQ(ierr);
572 #endif
573     ierr = MatGetSize(Aarr[level1], &M, &N);CHKERRQ(ierr); /* M is loop test variables */
574     ierr = MatGetInfo(Aarr[level1], MAT_GLOBAL_SUM, &info);CHKERRQ(ierr);
575     nnztot += info.nz_used;
576     ierr = PetscInfo5(pc,"%d) N=%D, n data cols=%d, nnz/row (ave)=%d, %d active pes\n",level1,M,pc_gamg->data_cell_cols,(int)(info.nz_used/(PetscReal)M),nactivepe);CHKERRQ(ierr);
577 
578 #if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES)
579     ierr = PetscLogStagePop();CHKERRQ(ierr);
580 #endif
581     /* stop if one node or one proc -- could pull back for singular problems */
582     if ( (pc_gamg->data_cell_cols && M/pc_gamg->data_cell_cols < 2) || (!pc_gamg->data_cell_cols && M/bs < 2) ) {
583       ierr =  PetscInfo2(pc,"HARD stop of coarsening on level %D.  Grid too small: %D block nodes\n",level,M/bs);CHKERRQ(ierr);
584       level++;
585       break;
586     }
587   } /* levels */
588   ierr                  = PetscFree(pc_gamg->data);CHKERRQ(ierr);
589 
590   ierr = PetscInfo2(pc,"%D levels, grid complexity = %g\n",level+1,nnztot/nnz0);CHKERRQ(ierr);
591   pc_gamg->Nlevels = level + 1;
592   fine_level       = level;
593   ierr             = PCMGSetLevels(pc,pc_gamg->Nlevels,NULL);CHKERRQ(ierr);
594 
595   if (pc_gamg->Nlevels > 1) { /* don't setup MG if one level */
596     /* set default smoothers & set operators */
597     for (lidx = 1, level = pc_gamg->Nlevels-2; lidx <= fine_level; lidx++, level--) {
598       KSP smoother;
599       PC  subpc;
600 
601       ierr = PCMGGetSmoother(pc, lidx, &smoother);CHKERRQ(ierr);
602       ierr = KSPGetPC(smoother, &subpc);CHKERRQ(ierr);
603 
604       ierr = KSPSetNormType(smoother, KSP_NORM_NONE);CHKERRQ(ierr);
605       /* set ops */
606       ierr = KSPSetOperators(smoother, Aarr[level], Aarr[level]);CHKERRQ(ierr);
607       ierr = PCMGSetInterpolation(pc, lidx, Parr[level+1]);CHKERRQ(ierr);
608 
609       /* set defaults */
610       ierr = KSPSetType(smoother, KSPCHEBYSHEV);CHKERRQ(ierr);
611 
612       /* set blocks for ASM smoother that uses the 'aggregates' */
613       if (pc_gamg->use_aggs_in_asm) {
614         PetscInt sz;
615         IS       *iss;
616 
617         sz   = nASMBlocksArr[level];
618         iss   = ASMLocalIDsArr[level];
619         ierr = PCSetType(subpc, PCASM);CHKERRQ(ierr);
620         ierr = PCASMSetOverlap(subpc, 0);CHKERRQ(ierr);
621         ierr = PCASMSetType(subpc,PC_ASM_BASIC);CHKERRQ(ierr);
622         if (!sz) {
623           IS       is;
624           ierr = ISCreateGeneral(PETSC_COMM_SELF, 0, NULL, PETSC_COPY_VALUES, &is);CHKERRQ(ierr);
625           ierr = PCASMSetLocalSubdomains(subpc, 1, NULL, &is);CHKERRQ(ierr);
626           ierr = ISDestroy(&is);CHKERRQ(ierr);
627         } else {
628           PetscInt kk;
629           ierr = PCASMSetLocalSubdomains(subpc, sz, NULL, iss);CHKERRQ(ierr);
630           for (kk=0; kk<sz; kk++) {
631             ierr = ISDestroy(&iss[kk]);CHKERRQ(ierr);
632           }
633           ierr = PetscFree(iss);CHKERRQ(ierr);
634         }
635         ASMLocalIDsArr[level] = NULL;
636         nASMBlocksArr[level]  = 0;
637       } else {
638         ierr = PCSetType(subpc, PCSOR);CHKERRQ(ierr);
639       }
640     }
641     {
642       /* coarse grid */
643       KSP smoother,*k2; PC subpc,pc2; PetscInt ii,first;
644       Mat Lmat = Aarr[(level=pc_gamg->Nlevels-1)]; lidx = 0;
645       ierr = PCMGGetSmoother(pc, lidx, &smoother);CHKERRQ(ierr);
646       ierr = KSPSetOperators(smoother, Lmat, Lmat);CHKERRQ(ierr);
647       if (!pc_gamg->use_parallel_coarse_grid_solver) {
648         ierr = KSPSetNormType(smoother, KSP_NORM_NONE);CHKERRQ(ierr);
649         ierr = KSPGetPC(smoother, &subpc);CHKERRQ(ierr);
650         ierr = PCSetType(subpc, PCBJACOBI);CHKERRQ(ierr);
651         ierr = PCSetUp(subpc);CHKERRQ(ierr);
652         ierr = PCBJacobiGetSubKSP(subpc,&ii,&first,&k2);CHKERRQ(ierr);
653         if (ii != 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"ii %D is not one",ii);
654         ierr = KSPGetPC(k2[0],&pc2);CHKERRQ(ierr);
655         ierr = PCSetType(pc2, PCLU);CHKERRQ(ierr);
656         ierr = PCFactorSetShiftType(pc2,MAT_SHIFT_INBLOCKS);CHKERRQ(ierr);
657         ierr = KSPSetTolerances(k2[0],PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,1);CHKERRQ(ierr);
658         ierr = KSPSetType(k2[0], KSPPREONLY);CHKERRQ(ierr);
659         /* This flag gets reset by PCBJacobiGetSubKSP(), but our BJacobi really does the same algorithm everywhere (and in
660          * fact, all but one process will have zero dofs), so we reset the flag to avoid having PCView_BJacobi attempt to
661          * view every subdomain as though they were different. */
662         ((PC_BJacobi*)subpc->data)->same_local_solves = PETSC_TRUE;
663       }
664     }
665 
666     /* should be called in PCSetFromOptions_GAMG(), but cannot be called prior to PCMGSetLevels() */
667     ierr = PetscObjectOptionsBegin((PetscObject)pc);CHKERRQ(ierr);
668     ierr = PCSetFromOptions_MG(PetscOptionsObject,pc);CHKERRQ(ierr);
669     ierr = PetscOptionsEnd();CHKERRQ(ierr);
670     if (!mg->galerkin) SETERRQ(comm,PETSC_ERR_USER,"PCGAMG must use Galerkin for coarse operators.");
671     if (mg->galerkin == 1) mg->galerkin = 2;
672 
673     /* clean up */
674     for (level=1; level<pc_gamg->Nlevels; level++) {
675       ierr = MatDestroy(&Parr[level]);CHKERRQ(ierr);
676       ierr = MatDestroy(&Aarr[level]);CHKERRQ(ierr);
677     }
678     ierr = PCSetUp_MG(pc);CHKERRQ(ierr);
679   } else {
680     KSP smoother;
681     ierr = PetscInfo(pc,"One level solver used (system is seen as DD). Using default solver.\n");CHKERRQ(ierr);
682     ierr = PCMGGetSmoother(pc, 0, &smoother);CHKERRQ(ierr);
683     ierr = KSPSetOperators(smoother, Aarr[0], Aarr[0]);CHKERRQ(ierr);
684     ierr = KSPSetType(smoother, KSPPREONLY);CHKERRQ(ierr);
685     ierr = PCSetUp_MG(pc);CHKERRQ(ierr);
686   }
687   PetscFunctionReturn(0);
688 }
689 
690 /* ------------------------------------------------------------------------- */
691 /*
692  PCDestroy_GAMG - Destroys the private context for the GAMG preconditioner
693    that was created with PCCreate_GAMG().
694 
695    Input Parameter:
696 .  pc - the preconditioner context
697 
698    Application Interface Routine: PCDestroy()
699 */
#undef __FUNCT__
#define __FUNCT__ "PCDestroy_GAMG"
PetscErrorCode PCDestroy_GAMG(PC pc)
{
  PetscErrorCode ierr;
  PC_MG          *mg     = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg= (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  ierr = PCReset_GAMG(pc);CHKERRQ(ierr);
  /* let the subtype (agg/geo/classical) free its own state before the common state below */
  if (pc_gamg->ops->destroy) {
    ierr = (*pc_gamg->ops->destroy)(pc);CHKERRQ(ierr);
  }
  ierr = PetscRandomDestroy(&pc_gamg->random);CHKERRQ(ierr);
  ierr = PetscFree(pc_gamg->ops);CHKERRQ(ierr);
  ierr = PetscFree(pc_gamg->gamg_type_name);CHKERRQ(ierr);
  ierr = PetscFree(pc_gamg);CHKERRQ(ierr);
  /* base-class teardown last; mg->innerctx is dangling here but PCDestroy_MG does not read it */
  ierr = PCDestroy_MG(pc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
720 
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetProcEqLim"
/*@
   PCGAMGSetProcEqLim - Set number of equations to aim for per process on the coarse grids via processor reduction.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - the number of equations


   Options Database Key:
.  -pc_gamg_process_eq_limit <limit>

   Notes: GAMG will reduce the number of MPI processes used directly on the coarse grids so that there are around <limit> equations on each process
          that has degrees of freedom

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: PCGAMGSetCoarseEqLim()
@*/
PetscErrorCode  PCGAMGSetProcEqLim(PC pc, PetscInt n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* dispatch to the method composed by PCCreate_GAMG(); silently a no-op for other PC types */
  ierr = PetscTryMethod(pc,"PCGAMGSetProcEqLim_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
754 
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetProcEqLim_GAMG"
/* GAMG implementation of PCGAMGSetProcEqLim() */
static PetscErrorCode PCGAMGSetProcEqLim_GAMG(PC pc, PetscInt n)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  if (n>0) pc_gamg->min_eq_proc = n;  /* non-positive values are ignored; the current limit is kept */
  PetscFunctionReturn(0);
}
766 
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetCoarseEqLim"
/*@
   PCGAMGSetCoarseEqLim - Set maximum number of equations on coarsest grid.

 Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - maximum number of equations to aim for

   Options Database Key:
.  -pc_gamg_coarse_eq_limit <limit>

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: PCGAMGSetProcEqLim()
@*/
PetscErrorCode PCGAMGSetCoarseEqLim(PC pc, PetscInt n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* dispatch to the method composed by PCCreate_GAMG(); silently a no-op for other PC types */
  ierr = PetscTryMethod(pc,"PCGAMGSetCoarseEqLim_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
796 
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetCoarseEqLim_GAMG"
/* GAMG implementation of PCGAMGSetCoarseEqLim() */
static PetscErrorCode PCGAMGSetCoarseEqLim_GAMG(PC pc, PetscInt n)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  if (n>0) pc_gamg->coarse_eq_limit = n;  /* non-positive values are ignored; the current limit is kept */
  PetscFunctionReturn(0);
}
808 
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetRepartition"
/*@
   PCGAMGSetRepartition - Repartition the degrees of freedom across the processors on the coarser grids

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - PETSC_TRUE or PETSC_FALSE

   Options Database Key:
.  -pc_gamg_repartition <true,false>

   Notes: this will generally improve the load balancing of the work on each level

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetRepartition(PC pc, PetscBool n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* dispatch to the method composed by PCCreate_GAMG(); silently a no-op for other PC types */
  ierr = PetscTryMethod(pc,"PCGAMGSetRepartition_C",(PC,PetscBool),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
840 
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetRepartition_GAMG"
/* GAMG implementation of PCGAMGSetRepartition() */
static PetscErrorCode PCGAMGSetRepartition_GAMG(PC pc, PetscBool n)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->repart = n;
  PetscFunctionReturn(0);
}
852 
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetReuseInterpolation"
/*@
   PCGAMGSetReuseInterpolation - Reuse prolongation when rebuilding algebraic multigrid preconditioner

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - PETSC_TRUE or PETSC_FALSE

   Options Database Key:
.  -pc_gamg_reuse_interpolation <true,false>

   Level: intermediate

   Notes: this may negatively affect the convergence rate of the method on new matrices if the matrix entries change a great deal, but allows
          rebuilding the preconditioner quicker.

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetReuseInterpolation(PC pc, PetscBool n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* dispatch to the method composed by PCCreate_GAMG(); silently a no-op for other PC types */
  ierr = PetscTryMethod(pc,"PCGAMGSetReuseInterpolation_C",(PC,PetscBool),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
885 
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetReuseInterpolation_GAMG"
/* GAMG implementation of PCGAMGSetReuseInterpolation() */
static PetscErrorCode PCGAMGSetReuseInterpolation_GAMG(PC pc, PetscBool n)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->reuse_prol = n;
  PetscFunctionReturn(0);
}
897 
#undef __FUNCT__
#define __FUNCT__ "PCGAMGASMSetUseAggs"
/*@
   PCGAMGASMSetUseAggs - Have the PCGAMG smoother on each level use the aggregates defined by the coarsening process as the subdomains for the additive Schwarz preconditioner.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  flg - PETSC_TRUE to use aggregates, PETSC_FALSE to not

   Options Database Key:
.  -pc_gamg_asm_use_agg

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGASMSetUseAggs(PC pc, PetscBool flg)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* dispatch to the method composed by PCCreate_GAMG(); silently a no-op for other PC types */
  ierr = PetscTryMethod(pc,"PCGAMGASMSetUseAggs_C",(PC,PetscBool),(pc,flg));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
927 
#undef __FUNCT__
#define __FUNCT__ "PCGAMGASMSetUseAggs_GAMG"
/* GAMG implementation of PCGAMGASMSetUseAggs() */
static PetscErrorCode PCGAMGASMSetUseAggs_GAMG(PC pc, PetscBool flg)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->use_aggs_in_asm = flg;
  PetscFunctionReturn(0);
}
939 
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetUseParallelCoarseGridSolve"
/*@
   PCGAMGSetUseParallelCoarseGridSolve - allow a parallel coarse grid solver

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  flg - PETSC_TRUE to not force coarse grid onto one processor

   Options Database Key:
.  -pc_gamg_use_parallel_coarse_grid_solver

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetUseParallelCoarseGridSolve(PC pc, PetscBool flg)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* dispatch to the method composed by PCCreate_GAMG(); silently a no-op for other PC types */
  ierr = PetscTryMethod(pc,"PCGAMGSetUseParallelCoarseGridSolve_C",(PC,PetscBool),(pc,flg));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
969 
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetUseParallelCoarseGridSolve_GAMG"
/* GAMG implementation of PCGAMGSetUseParallelCoarseGridSolve() */
static PetscErrorCode PCGAMGSetUseParallelCoarseGridSolve_GAMG(PC pc, PetscBool flg)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->use_parallel_coarse_grid_solver = flg;
  PetscFunctionReturn(0);
}
981 
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetNlevels"
/*@
   PCGAMGSetNlevels -  Sets the maximum number of levels PCGAMG will use

   Not collective on PC

   Input Parameters:
+  pc - the preconditioner
-  n - the maximum number of levels to use

   Options Database Key:
.  -pc_mg_levels

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetNlevels(PC pc, PetscInt n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* dispatch to the method composed by PCCreate_GAMG(); silently a no-op for other PC types */
  ierr = PetscTryMethod(pc,"PCGAMGSetNlevels_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1011 
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetNlevels_GAMG"
/* GAMG implementation of PCGAMGSetNlevels(); n is not range-checked here (GAMG_MAXLEVELS is the default) */
static PetscErrorCode PCGAMGSetNlevels_GAMG(PC pc, PetscInt n)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->Nlevels = n;
  PetscFunctionReturn(0);
}
1023 
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetThreshold"
/*@
   PCGAMGSetThreshold - Relative threshold to use for dropping edges in aggregation graph

   Not collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - the threshold value, 0.0 means keep all nonzero entries in the graph; negative means keep even zero entries in the graph

   Options Database Key:
.  -pc_gamg_threshold <threshold>

   Notes: Before aggregating the graph GAMG will remove small values from the graph thus reducing the coupling in the graph and a different
    (perhaps better) coarser set of points.

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetThreshold(PC pc, PetscReal n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* dispatch to the method composed by PCCreate_GAMG(); silently a no-op for other PC types */
  ierr = PetscTryMethod(pc,"PCGAMGSetThreshold_C",(PC,PetscReal),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1056 
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetThreshold_GAMG"
/* GAMG implementation of PCGAMGSetThreshold(); negative values are allowed (keep zero entries) */
static PetscErrorCode PCGAMGSetThreshold_GAMG(PC pc, PetscReal n)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->threshold = n;
  PetscFunctionReturn(0);
}
1068 
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetType"
/*@
   PCGAMGSetType - Set solution method

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  type - PCGAMGAGG, PCGAMGGEO, or PCGAMGCLASSICAL

   Options Database Key:
.  -pc_gamg_type <agg,geo,classical> - type of algebraic multigrid to apply

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: PCGAMGGetType(), PCGAMG, PCGAMGType
@*/
PetscErrorCode PCGAMGSetType(PC pc, PCGAMGType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* dispatch to the method composed by PCCreate_GAMG(); silently a no-op for other PC types */
  ierr = PetscTryMethod(pc,"PCGAMGSetType_C",(PC,PCGAMGType),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1098 
#undef __FUNCT__
#define __FUNCT__ "PCGAMGGetType"
/*@
   PCGAMGGetType - Get solution method

   Collective on PC

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  type - the type of algorithm used

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: PCGAMGSetType(), PCGAMGType
@*/
PetscErrorCode PCGAMGGetType(PC pc, PCGAMGType *type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* PetscUseMethod (not Try): errors if pc is not of a type that composes PCGAMGGetType_C */
  ierr = PetscUseMethod(pc,"PCGAMGGetType_C",(PC,PCGAMGType*),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1127 
#undef __FUNCT__
#define __FUNCT__ "PCGAMGGetType_GAMG"
/* GAMG implementation of PCGAMGGetType() */
static PetscErrorCode PCGAMGGetType_GAMG(PC pc, PCGAMGType *type)
{
  PC_MG          *mg      = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  *type = pc_gamg->type;
  PetscFunctionReturn(0);
}
1139 
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetType_GAMG"
/* GAMG implementation of PCGAMGSetType(): look up the subtype constructor in GAMGList,
   tear down any previously selected subtype, then run the new subtype's create function */
static PetscErrorCode PCGAMGSetType_GAMG(PC pc, PCGAMGType type)
{
  PetscErrorCode ierr,(*r)(PC);
  PC_MG          *mg      = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->type = type;
  ierr = PetscFunctionListFind(GAMGList,type,&r);CHKERRQ(ierr);
  if (!r) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_UNKNOWN_TYPE,"Unknown GAMG type %s given",type);
  if (pc_gamg->ops->destroy) {
    /* switching away from a previously configured subtype: destroy its state and reset the ops table */
    ierr = (*pc_gamg->ops->destroy)(pc);CHKERRQ(ierr);
    ierr = PetscMemzero(pc_gamg->ops,sizeof(struct _PCGAMGOps));CHKERRQ(ierr);
    pc_gamg->ops->createlevel = PCGAMGCreateLevel_GAMG;
    /* cleaning up common data in pc_gamg - this should disappear someday */
    pc_gamg->data_cell_cols = 0;
    pc_gamg->data_cell_rows = 0;
    pc_gamg->orig_data_cell_cols = 0;
    pc_gamg->orig_data_cell_rows = 0;
    ierr = PetscFree(pc_gamg->data);CHKERRQ(ierr);
    pc_gamg->data_sz = 0;
  }
  ierr = PetscFree(pc_gamg->gamg_type_name);CHKERRQ(ierr);
  ierr = PetscStrallocpy(type,&pc_gamg->gamg_type_name);CHKERRQ(ierr);
  ierr = (*r)(pc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1169 
#undef __FUNCT__
#define __FUNCT__ "PCView_GAMG"
/* Print GAMG-specific options (called through mg->view from the base PCMG viewer) */
static PetscErrorCode PCView_GAMG(PC pc,PetscViewer viewer)
{
  PetscErrorCode ierr;
  PC_MG          *mg      = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  ierr = PetscViewerASCIIPrintf(viewer,"    GAMG specific options\n");CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer,"      Threshold for dropping small values from graph %g\n",(double)pc_gamg->threshold);CHKERRQ(ierr);
  if (pc_gamg->use_aggs_in_asm) {
    ierr = PetscViewerASCIIPrintf(viewer,"      Using aggregates from coarsening process to define subdomains for PCASM\n");CHKERRQ(ierr);
  }
  if (pc_gamg->use_parallel_coarse_grid_solver) {
    ierr = PetscViewerASCIIPrintf(viewer,"      Using parallel coarse grid solver (all coarse grid equations not put on one process)\n");CHKERRQ(ierr);
  }
  /* let the subtype (agg/geo/classical) print its own options, if it implements view */
  if (pc_gamg->ops->view) {
    ierr = (*pc_gamg->ops->view)(pc,viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1192 
1193 #undef __FUNCT__
1194 #define __FUNCT__ "PCSetFromOptions_GAMG"
1195 PetscErrorCode PCSetFromOptions_GAMG(PetscOptionItems *PetscOptionsObject,PC pc)
1196 {
1197   PetscErrorCode ierr;
1198   PC_MG          *mg      = (PC_MG*)pc->data;
1199   PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
1200   PetscBool      flag;
1201   MPI_Comm       comm;
1202   char           prefix[256];
1203   const char     *pcpre;
1204 
1205   PetscFunctionBegin;
1206   ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
1207   ierr = PetscOptionsHead(PetscOptionsObject,"GAMG options");CHKERRQ(ierr);
1208   {
1209     char tname[256];
1210     ierr = PetscOptionsFList("-pc_gamg_type","Type of AMG method","PCGAMGSetType",GAMGList, pc_gamg->gamg_type_name, tname, sizeof(tname), &flag);CHKERRQ(ierr);
1211     if (flag) {
1212       ierr = PCGAMGSetType(pc,tname);CHKERRQ(ierr);
1213     }
1214     ierr = PetscOptionsBool("-pc_gamg_repartition","Repartion coarse grids","PCGAMGSetRepartition",pc_gamg->repart,&pc_gamg->repart,NULL);CHKERRQ(ierr);
1215     ierr = PetscOptionsBool("-pc_gamg_reuse_interpolation","Reuse prolongation operator","PCGAMGReuseInterpolation",pc_gamg->reuse_prol,&pc_gamg->reuse_prol,NULL);CHKERRQ(ierr);
1216     ierr = PetscOptionsBool("-pc_gamg_asm_use_agg","Use aggregation agragates for ASM smoother","PCGAMGASMSetUseAggs",pc_gamg->use_aggs_in_asm,&pc_gamg->use_aggs_in_asm,NULL);CHKERRQ(ierr);
1217     ierr = PetscOptionsBool("-pc_gamg_use_parallel_coarse_grid_solver","Use parallel coarse grid solver (otherwise put last grid on one process)","PCGAMGSetUseParallelCoarseGridSolve",pc_gamg->use_parallel_coarse_grid_solver,&pc_gamg->use_parallel_coarse_grid_solver,NULL);CHKERRQ(ierr);
1218     ierr = PetscOptionsInt("-pc_gamg_process_eq_limit","Limit (goal) on number of equations per process on coarse grids","PCGAMGSetProcEqLim",pc_gamg->min_eq_proc,&pc_gamg->min_eq_proc,NULL);CHKERRQ(ierr);
1219     ierr = PetscOptionsInt("-pc_gamg_coarse_eq_limit","Limit on number of equations for the coarse grid","PCGAMGSetCoarseEqLim",pc_gamg->coarse_eq_limit,&pc_gamg->coarse_eq_limit,NULL);CHKERRQ(ierr);
1220     ierr = PetscOptionsReal("-pc_gamg_threshold","Relative threshold to use for dropping edges in aggregation graph","PCGAMGSetThreshold",pc_gamg->threshold,&pc_gamg->threshold,&flag);CHKERRQ(ierr);
1221     ierr = PetscOptionsInt("-pc_mg_levels","Set number of MG levels","PCGAMGSetNlevels",pc_gamg->Nlevels,&pc_gamg->Nlevels,NULL);CHKERRQ(ierr);
1222 
1223     /* set options for subtype */
1224     if (pc_gamg->ops->setfromoptions) {ierr = (*pc_gamg->ops->setfromoptions)(PetscOptionsObject,pc);CHKERRQ(ierr);}
1225   }
1226   ierr = PCGetOptionsPrefix(pc, &pcpre);CHKERRQ(ierr);
1227   ierr = PetscSNPrintf(prefix,sizeof(prefix),"%spc_gamg_",pcpre ? pcpre : "");CHKERRQ(ierr);
1228   ierr = PetscObjectSetOptionsPrefix((PetscObject)pc_gamg->random,prefix);CHKERRQ(ierr);
1229   ierr = PetscRandomSetFromOptions(pc_gamg->random);CHKERRQ(ierr);
1230   ierr = PetscOptionsTail();CHKERRQ(ierr);
1231   PetscFunctionReturn(0);
1232 }
1233 
1234 /* -------------------------------------------------------------------------- */
1235 /*MC
1236      PCGAMG - Geometric algebraic multigrid (AMG) preconditioner
1237 
1238    Options Database Keys:
1239 +   -pc_gamg_type <type> - one of agg, geo, or classical
.   -pc_gamg_repartition  <true,default=false> - repartition the degrees of freedom across the coarse grids as they are determined
1241 .   -pc_gamg_reuse_interpolation <true,default=false> - when rebuilding the algebraic multigrid preconditioner reuse the previously computed interpolations
.   -pc_gamg_asm_use_agg <true,default=false> - use the aggregates from the coarsening process to define the subdomains on each level for the PCASM smoother
1243 .   -pc_gamg_process_eq_limit <limit, default=50> - GAMG will reduce the number of MPI processes used directly on the coarse grids so that there are around <limit>
1244                                         equations on each process that has degrees of freedom
1245 .   -pc_gamg_coarse_eq_limit <limit, default=50> - Set maximum number of equations on coarsest grid to aim for.
-   -pc_gamg_threshold <thresh,default=0> - Before aggregating the graph GAMG will remove small values from the graph, thus reducing the coupling in the graph and giving a different (perhaps better) coarser set of points.
1247 
1248    Options Database Keys for default Aggregation:
1249 +  -pc_gamg_agg_nsmooths <nsmooth, default=1> - number of smoothing steps to use with smooth aggregation
1250 .  -pc_gamg_sym_graph <true,default=false> - symmetrize the graph before computing the aggregation
1251 -  -pc_gamg_square_graph <n,default=1> - number of levels to square the graph before aggregating it
1252 
1253    Multigrid options(inherited):
1254 +  -pc_mg_cycles <v>: v or w (PCMGSetCycleType())
1255 .  -pc_mg_smoothup <1>: Number of post-smoothing steps (PCMGSetNumberSmoothUp)
1256 .  -pc_mg_smoothdown <1>: Number of pre-smoothing steps (PCMGSetNumberSmoothDown)
1257 .  -pc_mg_type <multiplicative>: (one of) additive multiplicative full kascade
1258 -  -pc_mg_levels <levels> - Number of levels of multigrid to use.
1259 
1260 
1261   Notes: In order to obtain good performance for PCGAMG for vector valued problems you must
1262 $       Call MatSetBlockSize() to indicate the number of degrees of freedom per grid point
1263 $       Call MatSetNearNullSpace() (or PCSetCoordinates() if solving the equations of elasticity) to indicate the near null space of the operator
1264 $       See the Users Manual Chapter 4 for more details
1265 
1266   Level: intermediate
1267 
1268   Concepts: algebraic multigrid
1269 
1270 .seealso:  PCCreate(), PCSetType(), MatSetBlockSize(), PCMGType, PCSetCoordinates(), MatSetNearNullSpace(), PCGAMGSetType(), PCGAMGAGG, PCGAMGGEO, PCGAMGCLASSICAL, PCGAMGSetProcEqLim(),
1271            PCGAMGSetCoarseEqLim(), PCGAMGSetRepartition(), PCGAMGRegister(), PCGAMGSetReuseInterpolation(), PCGAMGASMSetUseAggs(), PCGAMGSetUseParallelCoarseGridSolve(), PCGAMGSetNlevels(), PCGAMGSetThreshold(), PCGAMGGetType(), PCGAMGSetReuseInterpolation()
1272 M*/
1273 
#undef __FUNCT__
#define __FUNCT__ "PCCreate_GAMG"
/* Constructor for PCGAMG: builds on PCMG, installs GAMG's own ops and composed
   methods, sets defaults, and selects the default subtype (PCGAMGAGG). */
PETSC_EXTERN PetscErrorCode PCCreate_GAMG(PC pc)
{
  PetscErrorCode ierr;
  PC_GAMG        *pc_gamg;
  PC_MG          *mg;

  PetscFunctionBegin;
   /* register AMG type */
  ierr = PCGAMGInitializePackage();CHKERRQ(ierr);

  /* PCGAMG is an inherited class of PCMG. Initialize pc as PCMG */
  ierr = PCSetType(pc, PCMG);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)pc, PCGAMG);CHKERRQ(ierr);

  /* create a supporting struct and attach it to pc */
  ierr         = PetscNewLog(pc,&pc_gamg);CHKERRQ(ierr);
  mg           = (PC_MG*)pc->data;
  mg->galerkin = 2;             /* Use Galerkin, but it is computed externally from PCMG by GAMG code */
  mg->innerctx = pc_gamg;

  ierr = PetscNewLog(pc,&pc_gamg->ops);CHKERRQ(ierr);

  pc_gamg->setup_count = 0;
  /* these should be in subctx but repartitioning needs simple arrays */
  pc_gamg->data_sz = 0;
  pc_gamg->data    = 0;

  /* overwrite the pointers of PCMG by the functions of base class PCGAMG */
  pc->ops->setfromoptions = PCSetFromOptions_GAMG;
  pc->ops->setup          = PCSetUp_GAMG;
  pc->ops->reset          = PCReset_GAMG;
  pc->ops->destroy        = PCDestroy_GAMG;
  mg->view                = PCView_GAMG;

  /* compose the implementations behind the public PCGAMGSet/Get API */
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetProcEqLim_C",PCGAMGSetProcEqLim_GAMG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetCoarseEqLim_C",PCGAMGSetCoarseEqLim_GAMG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetRepartition_C",PCGAMGSetRepartition_GAMG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetReuseInterpolation_C",PCGAMGSetReuseInterpolation_GAMG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGASMSetUseAggs_C",PCGAMGASMSetUseAggs_GAMG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetUseParallelCoarseGridSolve_C",PCGAMGSetUseParallelCoarseGridSolve_GAMG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetThreshold_C",PCGAMGSetThreshold_GAMG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetType_C",PCGAMGSetType_GAMG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGGetType_C",PCGAMGGetType_GAMG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetNlevels_C",PCGAMGSetNlevels_GAMG);CHKERRQ(ierr);
  /* defaults (match the values advertised in the PCGAMG man page) */
  pc_gamg->repart           = PETSC_FALSE;
  pc_gamg->reuse_prol       = PETSC_FALSE;
  pc_gamg->use_aggs_in_asm  = PETSC_FALSE;
  pc_gamg->use_parallel_coarse_grid_solver = PETSC_FALSE;
  pc_gamg->min_eq_proc      = 50;
  pc_gamg->coarse_eq_limit  = 50;
  pc_gamg->threshold        = 0.;
  pc_gamg->Nlevels          = GAMG_MAXLEVELS;
  pc_gamg->current_level    = 0; /* don't need to init really */
  pc_gamg->ops->createlevel = PCGAMGCreateLevel_GAMG;

  ierr = PetscRandomCreate(PetscObjectComm((PetscObject)pc),&pc_gamg->random);CHKERRQ(ierr);

  /* PCSetUp_GAMG assumes that the type has been set, so set it to the default now */
  ierr = PCGAMGSetType(pc,PCGAMGAGG);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1337 
1338 #undef __FUNCT__
1339 #define __FUNCT__ "PCGAMGInitializePackage"
1340 /*@C
1341  PCGAMGInitializePackage - This function initializes everything in the PCGAMG package. It is called
1342     from PetscDLLibraryRegister() when using dynamic libraries, and on the first call to PCCreate_GAMG()
1343     when using static libraries.
1344 
1345  Level: developer
1346 
1347  .keywords: PC, PCGAMG, initialize, package
1348  .seealso: PetscInitialize()
1349 @*/
1350 PetscErrorCode PCGAMGInitializePackage(void)
1351 {
1352   PetscErrorCode ierr;
1353 
1354   PetscFunctionBegin;
1355   if (PCGAMGPackageInitialized) PetscFunctionReturn(0);
1356   PCGAMGPackageInitialized = PETSC_TRUE;
1357   ierr = PetscFunctionListAdd(&GAMGList,PCGAMGGEO,PCCreateGAMG_GEO);CHKERRQ(ierr);
1358   ierr = PetscFunctionListAdd(&GAMGList,PCGAMGAGG,PCCreateGAMG_AGG);CHKERRQ(ierr);
1359   ierr = PetscFunctionListAdd(&GAMGList,PCGAMGCLASSICAL,PCCreateGAMG_Classical);CHKERRQ(ierr);
1360   ierr = PetscRegisterFinalize(PCGAMGFinalizePackage);CHKERRQ(ierr);
1361 
1362   /* general events */
1363   ierr = PetscLogEventRegister("PCGAMGGraph_AGG", 0, &PC_GAMGGraph_AGG);CHKERRQ(ierr);
1364   ierr = PetscLogEventRegister("PCGAMGGraph_GEO", PC_CLASSID, &PC_GAMGGraph_GEO);CHKERRQ(ierr);
1365   ierr = PetscLogEventRegister("PCGAMGCoarse_AGG", PC_CLASSID, &PC_GAMGCoarsen_AGG);CHKERRQ(ierr);
1366   ierr = PetscLogEventRegister("PCGAMGCoarse_GEO", PC_CLASSID, &PC_GAMGCoarsen_GEO);CHKERRQ(ierr);
1367   ierr = PetscLogEventRegister("PCGAMGProl_AGG", PC_CLASSID, &PC_GAMGProlongator_AGG);CHKERRQ(ierr);
1368   ierr = PetscLogEventRegister("PCGAMGProl_GEO", PC_CLASSID, &PC_GAMGProlongator_GEO);CHKERRQ(ierr);
1369   ierr = PetscLogEventRegister("PCGAMGPOpt_AGG", PC_CLASSID, &PC_GAMGOptProlongator_AGG);CHKERRQ(ierr);
1370 
1371 #if defined PETSC_GAMG_USE_LOG
1372   ierr = PetscLogEventRegister("GAMG: createProl", PC_CLASSID, &petsc_gamg_setup_events[SET1]);CHKERRQ(ierr);
1373   ierr = PetscLogEventRegister("  Graph", PC_CLASSID, &petsc_gamg_setup_events[GRAPH]);CHKERRQ(ierr);
1374   /* PetscLogEventRegister("    G.Mat", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_MAT]); */
1375   /* PetscLogEventRegister("    G.Filter", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_FILTER]); */
1376   /* PetscLogEventRegister("    G.Square", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_SQR]); */
1377   ierr = PetscLogEventRegister("  MIS/Agg", PC_CLASSID, &petsc_gamg_setup_events[SET4]);CHKERRQ(ierr);
1378   ierr = PetscLogEventRegister("  geo: growSupp", PC_CLASSID, &petsc_gamg_setup_events[SET5]);CHKERRQ(ierr);
1379   ierr = PetscLogEventRegister("  geo: triangle", PC_CLASSID, &petsc_gamg_setup_events[SET6]);CHKERRQ(ierr);
1380   ierr = PetscLogEventRegister("    search-set", PC_CLASSID, &petsc_gamg_setup_events[FIND_V]);CHKERRQ(ierr);
1381   ierr = PetscLogEventRegister("  SA: col data", PC_CLASSID, &petsc_gamg_setup_events[SET7]);CHKERRQ(ierr);
1382   ierr = PetscLogEventRegister("  SA: frmProl0", PC_CLASSID, &petsc_gamg_setup_events[SET8]);CHKERRQ(ierr);
1383   ierr = PetscLogEventRegister("  SA: smooth", PC_CLASSID, &petsc_gamg_setup_events[SET9]);CHKERRQ(ierr);
1384   ierr = PetscLogEventRegister("GAMG: partLevel", PC_CLASSID, &petsc_gamg_setup_events[SET2]);CHKERRQ(ierr);
1385   ierr = PetscLogEventRegister("  repartition", PC_CLASSID, &petsc_gamg_setup_events[SET12]);CHKERRQ(ierr);
1386   ierr = PetscLogEventRegister("  Invert-Sort", PC_CLASSID, &petsc_gamg_setup_events[SET13]);CHKERRQ(ierr);
1387   ierr = PetscLogEventRegister("  Move A", PC_CLASSID, &petsc_gamg_setup_events[SET14]);CHKERRQ(ierr);
1388   ierr = PetscLogEventRegister("  Move P", PC_CLASSID, &petsc_gamg_setup_events[SET15]);CHKERRQ(ierr);
1389 
1390   /* PetscLogEventRegister(" PL move data", PC_CLASSID, &petsc_gamg_setup_events[SET13]); */
1391   /* PetscLogEventRegister("GAMG: fix", PC_CLASSID, &petsc_gamg_setup_events[SET10]); */
1392   /* PetscLogEventRegister("GAMG: set levels", PC_CLASSID, &petsc_gamg_setup_events[SET11]); */
1393   /* create timer stages */
1394 #if defined GAMG_STAGES
1395   {
1396     char     str[32];
1397     PetscInt lidx;
1398     sprintf(str,"MG Level %d (finest)",0);
1399     ierr = PetscLogStageRegister(str, &gamg_stages[0]);CHKERRQ(ierr);
1400     for (lidx=1; lidx<9; lidx++) {
1401       sprintf(str,"MG Level %d",lidx);
1402       ierr = PetscLogStageRegister(str, &gamg_stages[lidx]);CHKERRQ(ierr);
1403     }
1404   }
1405 #endif
1406 #endif
1407   PetscFunctionReturn(0);
1408 }
1409 
#undef __FUNCT__
#define __FUNCT__ "PCGAMGFinalizePackage"
/*@C
 PCGAMGFinalizePackage - This function frees everything from the PCGAMG package. It is
    called from PetscFinalize() automatically.

 Level: developer

 .keywords: Petsc, destroy, package
 .seealso: PetscFinalize()
@*/
PetscErrorCode PCGAMGFinalizePackage(void)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* clearing the flag allows PCGAMGInitializePackage() to run again after a re-initialize */
  PCGAMGPackageInitialized = PETSC_FALSE;
  ierr = PetscFunctionListDestroy(&GAMGList);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1430 
#undef __FUNCT__
#define __FUNCT__ "PCGAMGRegister"
/*@C
 PCGAMGRegister - Register a PCGAMG implementation.

 Input Parameters:
 + type - string that will be used as the name of the GAMG type.
 - create - function for creating the gamg context.

  Level: advanced

 .seealso: PCGAMGType, PCGAMG, PCGAMGSetType()
@*/
PetscErrorCode PCGAMGRegister(PCGAMGType type, PetscErrorCode (*create)(PC))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* make sure GAMGList exists before adding to it (no-op if the package is already initialized) */
  ierr = PCGAMGInitializePackage();CHKERRQ(ierr);
  ierr = PetscFunctionListAdd(&GAMGList,type,create);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1453 
1454