xref: /petsc/src/ksp/pc/impls/gamg/gamg.c (revision 32cc76960ddbb48660f8e7c667e293c0ccd0e7d7)
1 /*
2  GAMG geometric-algebraic multigrid PC - Mark Adams 2011
3  */
4 #include <petsc/private/matimpl.h>
5 #include <../src/ksp/pc/impls/gamg/gamg.h>           /*I "petscpc.h" I*/
6 #include <../src/ksp/pc/impls/bjacobi/bjacobi.h> /* Hack to access same_local_solves */
7 
8 #if defined PETSC_GAMG_USE_LOG
9 PetscLogEvent petsc_gamg_setup_events[NUM_SET];
10 #endif
11 
12 #if defined PETSC_USE_LOG
13 PetscLogEvent PC_GAMGGraph_AGG;
14 PetscLogEvent PC_GAMGGraph_GEO;
15 PetscLogEvent PC_GAMGCoarsen_AGG;
16 PetscLogEvent PC_GAMGCoarsen_GEO;
17 PetscLogEvent PC_GAMGProlongator_AGG;
18 PetscLogEvent PC_GAMGProlongator_GEO;
19 PetscLogEvent PC_GAMGOptProlongator_AGG;
20 #endif
21 
22 #define GAMG_MAXLEVELS 30
23 
24 /* #define GAMG_STAGES */
25 #if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES)
26 static PetscLogStage gamg_stages[GAMG_MAXLEVELS];
27 #endif
28 
29 static PetscFunctionList GAMGList = 0;
30 static PetscBool PCGAMGPackageInitialized;
31 
32 /* ----------------------------------------------------------------------------- */
33 #undef __FUNCT__
34 #define __FUNCT__ "PCReset_GAMG"
35 PetscErrorCode PCReset_GAMG(PC pc)
36 {
37   PetscErrorCode ierr;
38   PC_MG          *mg      = (PC_MG*)pc->data;
39   PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
40 
41   PetscFunctionBegin;
42   if (pc_gamg->data) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_PLIB,"This should not happen, cleaned up in SetUp\n");
43   pc_gamg->data_sz = 0;
44   ierr = PetscFree(pc_gamg->orig_data);CHKERRQ(ierr);
45   PetscFunctionReturn(0);
46 }
47 
48 /* -------------------------------------------------------------------------- */
49 /*
50    PCGAMGCreateLevel_GAMG: create the coarse grid operator with RAP (the Galerkin product);
51      repartition and/or reduce the number of active processes if requested.
52 
53    Input Parameters:
54    . pc - parameters, plus a side effect: the coarse data in 'pc_gamg->data' and
55           'pc_gamg->data_sz' are changed by the repartitioning/reduction
56    . Amat_fine - matrix on this fine (k) level
57    . cr_bs - coarse block size
58    In/Output Parameters:
59    . a_P_inout - prolongation operator to the next level (k-->k-1)
60    . a_nactive_proc - number of active processes
61    Output Parameter:
62    . a_Amat_crs - the coarse (k-1) matrix that is created
63 */
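/*
   For orientation: the coarse operator built below is the Galerkin product A_crs = P^T A P.
   A minimal standalone sketch of that one step, using the same PETSc call as this routine
   (illustrative only; 'Amat' and 'P' are assumed to be assembled elsewhere):

       Mat Acrs;
       ierr = MatPtAP(Amat, P, MAT_INITIAL_MATRIX, 2.0, &Acrs);CHKERRQ(ierr);

   The 2.0 is the 'fill' estimate (expected ratio of nonzeros in the product to nonzeros
   in Amat) used for preallocation.
*/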
64 
65 #undef __FUNCT__
66 #define __FUNCT__ "PCGAMGCreateLevel_GAMG"
67 static PetscErrorCode PCGAMGCreateLevel_GAMG(PC pc,Mat Amat_fine,PetscInt cr_bs,Mat *a_P_inout,Mat *a_Amat_crs,PetscMPIInt *a_nactive_proc,IS * Pcolumnperm, PetscBool is_last)
68 {
69   PetscErrorCode  ierr;
70   PC_MG           *mg         = (PC_MG*)pc->data;
71   PC_GAMG         *pc_gamg    = (PC_GAMG*)mg->innerctx;
72   Mat             Cmat,Pold=*a_P_inout;
73   MPI_Comm        comm;
74   PetscMPIInt     rank,size,new_size,nactive=*a_nactive_proc;
75   PetscInt        ncrs_eq,ncrs,f_bs;
76 
77   PetscFunctionBegin;
78   ierr = PetscObjectGetComm((PetscObject)Amat_fine,&comm);CHKERRQ(ierr);
79   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
80   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
81   ierr = MatGetBlockSize(Amat_fine, &f_bs);CHKERRQ(ierr);
82   ierr = MatPtAP(Amat_fine, Pold, MAT_INITIAL_MATRIX, 2.0, &Cmat);CHKERRQ(ierr);
83 
84   /* set 'ncrs' (nodes), 'ncrs_eq' (equations) */
85   ierr = MatGetLocalSize(Cmat, &ncrs_eq, NULL);CHKERRQ(ierr);
86   if (pc_gamg->data_cell_rows>0) {
87     ncrs = pc_gamg->data_sz/pc_gamg->data_cell_cols/pc_gamg->data_cell_rows;
88   } else {
89     PetscInt  bs;
90     ierr = MatGetBlockSize(Cmat, &bs);CHKERRQ(ierr);
91     ncrs = ncrs_eq/bs;
92   }
93 
94   /* determine the number of processes to make active, 'new_size'; after reduction it can be any integer in [1,P] */
95   if (is_last && !pc_gamg->use_parallel_coarse_grid_solver) new_size = 1;
96   else {
97     PetscInt ncrs_eq_glob;
98     ierr     = MatGetSize(Cmat, &ncrs_eq_glob, NULL);CHKERRQ(ierr);
99     new_size = (PetscMPIInt)((float)ncrs_eq_glob/(float)pc_gamg->min_eq_proc + 0.5); /* hardwire min. number of eq/proc */
100     if (!new_size) new_size = 1; /* unlikely, but possible? */
101     else if (new_size >= nactive) new_size = nactive; /* no change, rare */
102   }
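  /* worked example (illustrative): 100,000 global equations with the default min_eq_proc=50
     gives new_size = 2000, which is then capped at the current number of active processes */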
103 
104   if (Pcolumnperm) *Pcolumnperm = NULL;
105 
106   if (!pc_gamg->repart && new_size==nactive) *a_Amat_crs = Cmat; /* output - no repartitioning or reduction - could bail here */
107   else {
108     PetscInt       *counts,*newproc_idx,ii,jj,kk,strideNew,*tidx,ncrs_new,ncrs_eq_new,nloc_old;
109     IS             is_eq_newproc,is_eq_num,is_eq_num_prim,new_eq_indices;
110 
111     nloc_old = ncrs_eq/cr_bs;
112     if (ncrs_eq % cr_bs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"ncrs_eq %D not divisible by cr_bs %D",ncrs_eq,cr_bs);
113 #if defined PETSC_GAMG_USE_LOG
114     ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET12],0,0,0,0);CHKERRQ(ierr);
115 #endif
116     /* make 'is_eq_newproc' */
117     ierr = PetscMalloc1(size, &counts);CHKERRQ(ierr);
118     if (pc_gamg->repart) {
119       /* Repartition Cmat_{k} and move the columns of P^{k}_{k-1} and the coordinates of the primal part accordingly */
120       Mat adj;
121 
122       ierr = PetscInfo3(pc,"Repartition: size (active): %D --> %D, %D local equations\n",*a_nactive_proc,new_size,ncrs_eq);CHKERRQ(ierr);
123 
124       /* get 'adj' */
125       if (cr_bs == 1) {
126         ierr = MatConvert(Cmat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj);CHKERRQ(ierr);
127       } else {
128         /* make a scalar matrix to partition (no Stokes here) */
129         Mat               tMat;
130         PetscInt          Istart_crs,Iend_crs,ncols,jj,Ii;
131         const PetscScalar *vals;
132         const PetscInt    *idx;
133         PetscInt          *d_nnz, *o_nnz, M, N;
134         static PetscInt   llev = 0;
135         MatType           mtype;
136 
137         ierr = PetscMalloc2(ncrs, &d_nnz,ncrs, &o_nnz);CHKERRQ(ierr);
138         ierr = MatGetOwnershipRange(Cmat, &Istart_crs, &Iend_crs);CHKERRQ(ierr);
139         ierr = MatGetSize(Cmat, &M, &N);CHKERRQ(ierr);
140         for (Ii = Istart_crs, jj = 0; Ii < Iend_crs; Ii += cr_bs, jj++) {
141           ierr      = MatGetRow(Cmat,Ii,&ncols,0,0);CHKERRQ(ierr);
142           d_nnz[jj] = ncols/cr_bs;
143           o_nnz[jj] = ncols/cr_bs;
144           ierr      = MatRestoreRow(Cmat,Ii,&ncols,0,0);CHKERRQ(ierr);
145           if (d_nnz[jj] > ncrs) d_nnz[jj] = ncrs;
146           if (o_nnz[jj] > (M/cr_bs-ncrs)) o_nnz[jj] = M/cr_bs-ncrs;
147         }
148 
149         ierr = MatGetType(Amat_fine,&mtype);CHKERRQ(ierr);
150         ierr = MatCreate(comm, &tMat);CHKERRQ(ierr);
151         ierr = MatSetSizes(tMat, ncrs, ncrs,PETSC_DETERMINE, PETSC_DETERMINE);CHKERRQ(ierr);
152         ierr = MatSetType(tMat,mtype);CHKERRQ(ierr);
153         ierr = MatSeqAIJSetPreallocation(tMat,0,d_nnz);CHKERRQ(ierr);
154         ierr = MatMPIAIJSetPreallocation(tMat,0,d_nnz,0,o_nnz);CHKERRQ(ierr);
155         ierr = PetscFree2(d_nnz,o_nnz);CHKERRQ(ierr);
156 
157         for (ii = Istart_crs; ii < Iend_crs; ii++) {
158           PetscInt dest_row = ii/cr_bs;
159           ierr = MatGetRow(Cmat,ii,&ncols,&idx,&vals);CHKERRQ(ierr);
160           for (jj = 0; jj < ncols; jj++) {
161             PetscInt    dest_col = idx[jj]/cr_bs;
162             PetscScalar v        = 1.0;
163             ierr = MatSetValues(tMat,1,&dest_row,1,&dest_col,&v,ADD_VALUES);CHKERRQ(ierr);
164           }
165           ierr = MatRestoreRow(Cmat,ii,&ncols,&idx,&vals);CHKERRQ(ierr);
166         }
167         ierr = MatAssemblyBegin(tMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
168         ierr = MatAssemblyEnd(tMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
169 
170         if (llev++ == -1) {
171           PetscViewer viewer; char fname[32];
172           ierr = PetscSNPrintf(fname,sizeof(fname),"part_mat_%D.mat",llev);CHKERRQ(ierr);
173           ierr = PetscViewerBinaryOpen(comm,fname,FILE_MODE_WRITE,&viewer);CHKERRQ(ierr);
174           ierr = MatView(tMat, viewer);CHKERRQ(ierr);
175           ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
176         }
177         ierr = MatConvert(tMat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj);CHKERRQ(ierr);
178         ierr = MatDestroy(&tMat);CHKERRQ(ierr);
179       } /* create 'adj' */
180 
181       { /* partition: get newproc_idx */
182         char            prefix[256];
183         const char      *pcpre;
184         const PetscInt  *is_idx;
185         MatPartitioning mpart;
186         IS              proc_is;
187         PetscInt        targetPE;
188 
189         ierr = MatPartitioningCreate(comm, &mpart);CHKERRQ(ierr);
190         ierr = MatPartitioningSetAdjacency(mpart, adj);CHKERRQ(ierr);
191         ierr = PCGetOptionsPrefix(pc, &pcpre);CHKERRQ(ierr);
192         ierr = PetscSNPrintf(prefix,sizeof(prefix),"%spc_gamg_",pcpre ? pcpre : "");CHKERRQ(ierr);
193         ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
194         ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr);
195         ierr = MatPartitioningSetNParts(mpart, new_size);CHKERRQ(ierr);
196         ierr = MatPartitioningApply(mpart, &proc_is);CHKERRQ(ierr);
197         ierr = MatPartitioningDestroy(&mpart);CHKERRQ(ierr);
198 
199         /* collect IS info */
200         ierr     = PetscMalloc1(ncrs_eq, &newproc_idx);CHKERRQ(ierr);
201         ierr     = ISGetIndices(proc_is, &is_idx);CHKERRQ(ierr);
202         targetPE = 1; /* bring to "front" of machine */
203         /*targetPE = size/new_size;*/ /* spread partitioning across machine */
204         for (kk = jj = 0 ; kk < nloc_old ; kk++) {
205           for (ii = 0 ; ii < cr_bs ; ii++, jj++) {
206             newproc_idx[jj] = is_idx[kk] * targetPE; /* distribution */
207           }
208         }
209         ierr = ISRestoreIndices(proc_is, &is_idx);CHKERRQ(ierr);
210         ierr = ISDestroy(&proc_is);CHKERRQ(ierr);
211       }
212       ierr = MatDestroy(&adj);CHKERRQ(ierr);
213 
214       ierr = ISCreateGeneral(comm, ncrs_eq, newproc_idx, PETSC_COPY_VALUES, &is_eq_newproc);CHKERRQ(ierr);
215       ierr = PetscFree(newproc_idx);CHKERRQ(ierr);
216     } else { /* simple aggregation of parts -- 'is_eq_newproc' */
217       PetscInt rfactor,targetPE;
218 
219       /* find factor */
220       if (new_size == 1) rfactor = size; /* easy */
221       else {
222         PetscReal best_fact = 0.;
223         jj = -1;
224         for (kk = 1 ; kk <= size ; kk++) {
225           if (!(size%kk)) { /* a candidate */
226             PetscReal nactpe = (PetscReal)size/(PetscReal)kk, fact = nactpe/(PetscReal)new_size;
227             if (fact > 1.0) fact = 1./fact; /* keep fact < 1 */
228             if (fact > best_fact) {
229               best_fact = fact; jj = kk;
230             }
231           }
232         }
233         if (jj != -1) rfactor = jj;
234         else rfactor = 1; /* should not be reached: kk=1 always divides size */
235       }
236       new_size = size/rfactor;
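      /* worked example (illustrative): size=16 and a requested new_size=5 gives candidate
         divisors kk in {1,2,4,8,16}; size/kk=4 (kk=4) is the closest to 5, so rfactor=4
         and new_size becomes 16/4 = 4 */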
237 
238       if (new_size==nactive) {
239         *a_Amat_crs = Cmat; /* output - no repartitioning or reduction; return early since this branch is nested */
240         ierr        = PetscFree(counts);CHKERRQ(ierr);
241         ierr = PetscInfo2(pc,"Aggregate processors noop: new_size=%D, neq(loc)=%D\n",new_size,ncrs_eq);CHKERRQ(ierr);
242 #if defined PETSC_GAMG_USE_LOG
243         ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET12],0,0,0,0);CHKERRQ(ierr);
244 #endif
245         PetscFunctionReturn(0);
246       }
247 
248       ierr = PetscInfo1(pc,"Number of equations (loc) %D with simple aggregation\n",ncrs_eq);CHKERRQ(ierr);
249       targetPE = rank/rfactor;
250       ierr     = ISCreateStride(comm, ncrs_eq, targetPE, 0, &is_eq_newproc);CHKERRQ(ierr);
251     } /* end simple 'is_eq_newproc' */
252 
253     /*
254      Create an index set from the is_eq_newproc index set to indicate the mapping TO
255      */
256     ierr = ISPartitioningToNumbering(is_eq_newproc, &is_eq_num);CHKERRQ(ierr);
257     is_eq_num_prim = is_eq_num;
258     /*
259       Determine how many equations/vertices are assigned to each processor
260      */
261     ierr        = ISPartitioningCount(is_eq_newproc, size, counts);CHKERRQ(ierr);
262     ncrs_eq_new = counts[rank];
263     ierr        = ISDestroy(&is_eq_newproc);CHKERRQ(ierr);
264     ncrs_new = ncrs_eq_new/cr_bs; /* nodes */
265 
266     ierr = PetscFree(counts);CHKERRQ(ierr);
267 #if defined PETSC_GAMG_USE_LOG
268     ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET12],0,0,0,0);CHKERRQ(ierr);
269 #endif
270     /* data movement scope -- this could be moved into the subclasses so that we do not have to cram all the auxiliary data into one abstraction */
271     {
272     Vec            src_crd, dest_crd;
273     const PetscInt *idx,ndata_rows=pc_gamg->data_cell_rows,ndata_cols=pc_gamg->data_cell_cols,node_data_sz=ndata_rows*ndata_cols;
274     VecScatter     vecscat;
275     PetscScalar    *array;
276     IS isscat;
277 
278     /* move data (for primal equations only) */
279     /* Create a vector to contain the newly ordered element information */
280     ierr = VecCreate(comm, &dest_crd);CHKERRQ(ierr);
281     ierr = VecSetSizes(dest_crd, node_data_sz*ncrs_new, PETSC_DECIDE);CHKERRQ(ierr);
282     ierr = VecSetType(dest_crd,VECSTANDARD);CHKERRQ(ierr); /* this is needed! */
283     /*
284      There are 'ndata_rows*ndata_cols' data items per node (one can think of the vectors as
285      having a block size of 'node_data_sz').  Note, ISs are expanded into equation space by 'cr_bs'.
286      */
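    /* illustrative example: with cr_bs=3 (e.g., 3D elasticity) and node_data_sz=6 data items
       per node, a node whose new global id is 7 maps to destination slots 42..47 */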
287     ierr = PetscMalloc1(ncrs*node_data_sz, &tidx);CHKERRQ(ierr);
288     ierr = ISGetIndices(is_eq_num_prim, &idx);CHKERRQ(ierr);
289     for (ii=0,jj=0; ii<ncrs; ii++) {
290       PetscInt id = idx[ii*cr_bs]/cr_bs; /* get node back */
291       for (kk=0; kk<node_data_sz; kk++, jj++) tidx[jj] = id*node_data_sz + kk;
292     }
293     ierr = ISRestoreIndices(is_eq_num_prim, &idx);CHKERRQ(ierr);
294     ierr = ISCreateGeneral(comm, node_data_sz*ncrs, tidx, PETSC_COPY_VALUES, &isscat);CHKERRQ(ierr);
295     ierr = PetscFree(tidx);CHKERRQ(ierr);
296     /*
297      Create a vector to contain the original vertex information for each element
298      */
299     ierr = VecCreateSeq(PETSC_COMM_SELF, node_data_sz*ncrs, &src_crd);CHKERRQ(ierr);
300     for (jj=0; jj<ndata_cols; jj++) {
301       const PetscInt stride0=ncrs*pc_gamg->data_cell_rows;
302       for (ii=0; ii<ncrs; ii++) {
303         for (kk=0; kk<ndata_rows; kk++) {
304           PetscInt    ix = ii*ndata_rows + kk + jj*stride0, jx = ii*node_data_sz + kk*ndata_cols + jj;
305           PetscScalar tt = (PetscScalar)pc_gamg->data[ix];
306           ierr = VecSetValues(src_crd, 1, &jx, &tt, INSERT_VALUES);CHKERRQ(ierr);
307         }
308       }
309     }
310     ierr = VecAssemblyBegin(src_crd);CHKERRQ(ierr);
311     ierr = VecAssemblyEnd(src_crd);CHKERRQ(ierr);
312     /*
313       Scatter the element vertex information (still in the original vertex ordering)
314       to the correct processor
315     */
316     ierr = VecScatterCreate(src_crd, NULL, dest_crd, isscat, &vecscat);CHKERRQ(ierr);
317     ierr = ISDestroy(&isscat);CHKERRQ(ierr);
318     ierr = VecScatterBegin(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
319     ierr = VecScatterEnd(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
320     ierr = VecScatterDestroy(&vecscat);CHKERRQ(ierr);
321     ierr = VecDestroy(&src_crd);CHKERRQ(ierr);
322     /*
323       Put the element vertex data into a new allocation of the gdata->ele
324     */
325     ierr = PetscFree(pc_gamg->data);CHKERRQ(ierr);
326     ierr = PetscMalloc1(node_data_sz*ncrs_new, &pc_gamg->data);CHKERRQ(ierr);
327 
328     pc_gamg->data_sz = node_data_sz*ncrs_new;
329     strideNew        = ncrs_new*ndata_rows;
330 
331     ierr = VecGetArray(dest_crd, &array);CHKERRQ(ierr);
332     for (jj=0; jj<ndata_cols; jj++) {
333       for (ii=0; ii<ncrs_new; ii++) {
334         for (kk=0; kk<ndata_rows; kk++) {
335           PetscInt ix = ii*ndata_rows + kk + jj*strideNew, jx = ii*node_data_sz + kk*ndata_cols + jj;
336           pc_gamg->data[ix] = PetscRealPart(array[jx]);
337         }
338       }
339     }
340     ierr = VecRestoreArray(dest_crd, &array);CHKERRQ(ierr);
341     ierr = VecDestroy(&dest_crd);CHKERRQ(ierr);
342     }
343     /* move A and P (columns) with new layout */
344 #if defined PETSC_GAMG_USE_LOG
345     ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET13],0,0,0,0);CHKERRQ(ierr);
346 #endif
347 
348     /*
349       Invert for MatGetSubMatrix
350     */
351     ierr = ISInvertPermutation(is_eq_num, ncrs_eq_new, &new_eq_indices);CHKERRQ(ierr);
352     ierr = ISSort(new_eq_indices);CHKERRQ(ierr); /* is this needed? */
353     ierr = ISSetBlockSize(new_eq_indices, cr_bs);CHKERRQ(ierr);
354     if (is_eq_num != is_eq_num_prim) {
355       ierr = ISDestroy(&is_eq_num_prim);CHKERRQ(ierr); /* could be same as 'is_eq_num' */
356     }
357     if (Pcolumnperm) {
358       ierr = PetscObjectReference((PetscObject)new_eq_indices);CHKERRQ(ierr);
359       *Pcolumnperm = new_eq_indices;
360     }
361     ierr = ISDestroy(&is_eq_num);CHKERRQ(ierr);
362 #if defined PETSC_GAMG_USE_LOG
363     ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET13],0,0,0,0);CHKERRQ(ierr);
364     ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET14],0,0,0,0);CHKERRQ(ierr);
365 #endif
366     /* 'a_Amat_crs' output */
367     {
368       Mat mat;
369       ierr        = MatGetSubMatrix(Cmat, new_eq_indices, new_eq_indices, MAT_INITIAL_MATRIX, &mat);CHKERRQ(ierr);
370       *a_Amat_crs = mat;
371     }
372     ierr = MatDestroy(&Cmat);CHKERRQ(ierr);
373 
374 #if defined PETSC_GAMG_USE_LOG
375     ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET14],0,0,0,0);CHKERRQ(ierr);
376 #endif
377     /* prolongator */
378     {
379       IS       findices;
380       PetscInt Istart,Iend;
381       Mat      Pnew;
382 
383       ierr = MatGetOwnershipRange(Pold, &Istart, &Iend);CHKERRQ(ierr);
384 #if defined PETSC_GAMG_USE_LOG
385       ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET15],0,0,0,0);CHKERRQ(ierr);
386 #endif
387       ierr = ISCreateStride(comm,Iend-Istart,Istart,1,&findices);CHKERRQ(ierr);
388       ierr = ISSetBlockSize(findices,f_bs);CHKERRQ(ierr);
389       ierr = MatGetSubMatrix(Pold, findices, new_eq_indices, MAT_INITIAL_MATRIX, &Pnew);CHKERRQ(ierr);
390       ierr = ISDestroy(&findices);CHKERRQ(ierr);
391 
392 #if defined PETSC_GAMG_USE_LOG
393       ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET15],0,0,0,0);CHKERRQ(ierr);
394 #endif
395       ierr = MatDestroy(a_P_inout);CHKERRQ(ierr);
396 
397       /* output - repartitioned */
398       *a_P_inout = Pnew;
399     }
400     ierr = ISDestroy(&new_eq_indices);CHKERRQ(ierr);
401 
402     *a_nactive_proc = new_size; /* output */
403   }
404   PetscFunctionReturn(0);
405 }
406 
407 /* -------------------------------------------------------------------------- */
408 /*
409    PCSetUp_GAMG - Prepares for the use of the GAMG preconditioner
410                     by setting data structures and options.
411 
412    Input Parameter:
413 .  pc - the preconditioner context
414 
415 */
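/*
   Setup pipeline on each level (a summary of the loop below, not additional API):
     1. ops->graph          - build the (possibly filtered) strength-of-connection graph
     2. ops->coarsen        - select the aggregates/coarse points on that graph
     3. ops->prolongator    - build the tentative prolongator P
     4. ops->optprolongator - optionally improve P (e.g., by smoothing)
     5. ops->createlevel    - form the coarse operator with RAP and repartition/reduce processes
   The loop stops when coarsening fails, the coarse grid is small enough, or the level limit is hit.
*/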
416 #undef __FUNCT__
417 #define __FUNCT__ "PCSetUp_GAMG"
418 PetscErrorCode PCSetUp_GAMG(PC pc)
419 {
420   PetscErrorCode ierr;
421   PC_MG          *mg      = (PC_MG*)pc->data;
422   PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
423   Mat            Pmat     = pc->pmat;
424   PetscInt       fine_level,level,level1,bs,M,N,qq,lidx,nASMBlocksArr[GAMG_MAXLEVELS];
425   MPI_Comm       comm;
426   PetscMPIInt    rank,size,nactivepe;
427   Mat            Aarr[GAMG_MAXLEVELS],Parr[GAMG_MAXLEVELS];
428   IS             *ASMLocalIDsArr[GAMG_MAXLEVELS];
429   PetscLogDouble nnz0=0.,nnztot=0.;
430   MatInfo        info;
431   PetscBool      is_last = PETSC_FALSE;
432 
433   PetscFunctionBegin;
434   ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
435   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
436   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
437 
438   if (pc_gamg->setup_count++ > 0) {
439     if ((PetscBool)(!pc_gamg->reuse_prol)) {
440       /* reset everything */
441       ierr = PCReset_MG(pc);CHKERRQ(ierr);
442       pc->setupcalled = 0;
443     } else {
444       PC_MG_Levels **mglevels = mg->levels;
445       /* just do Galerkin grids */
446       Mat          B,dA,dB;
447 
448       if (!pc->setupcalled) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"PCSetUp() has not been called yet");
449       if (pc_gamg->Nlevels > 1) {
450         /* currently only handle case where mat and pmat are the same on coarser levels */
451         ierr = KSPGetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,&dA,&dB);CHKERRQ(ierr);
452         /* (re)set to get dirty flag */
453         ierr = KSPSetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,dA,dB);CHKERRQ(ierr);
454 
455         for (level=pc_gamg->Nlevels-2; level>=0; level--) {
456           /* the first time through, the matrix structure has changed from repartitioning */
457           if (pc_gamg->setup_count==2) {
458             ierr = MatPtAP(dB,mglevels[level+1]->interpolate,MAT_INITIAL_MATRIX,1.0,&B);CHKERRQ(ierr);
459             ierr = MatDestroy(&mglevels[level]->A);CHKERRQ(ierr);
460 
461             mglevels[level]->A = B;
462           } else {
463             ierr = KSPGetOperators(mglevels[level]->smoothd,NULL,&B);CHKERRQ(ierr);
464             ierr = MatPtAP(dB,mglevels[level+1]->interpolate,MAT_REUSE_MATRIX,1.0,&B);CHKERRQ(ierr);
465           }
466           ierr = KSPSetOperators(mglevels[level]->smoothd,B,B);CHKERRQ(ierr);
467           dB   = B;
468         }
469       }
470 
471       ierr = PCSetUp_MG(pc);CHKERRQ(ierr);
472       PetscFunctionReturn(0);
473     }
474   }
475 
476   if (!pc_gamg->data) {
477     if (pc_gamg->orig_data) {
478       ierr = MatGetBlockSize(Pmat, &bs);CHKERRQ(ierr);
479       ierr = MatGetLocalSize(Pmat, &qq, NULL);CHKERRQ(ierr);
480 
481       pc_gamg->data_sz        = (qq/bs)*pc_gamg->orig_data_cell_rows*pc_gamg->orig_data_cell_cols;
482       pc_gamg->data_cell_rows = pc_gamg->orig_data_cell_rows;
483       pc_gamg->data_cell_cols = pc_gamg->orig_data_cell_cols;
484 
485       ierr = PetscMalloc1(pc_gamg->data_sz, &pc_gamg->data);CHKERRQ(ierr);
486       for (qq=0; qq<pc_gamg->data_sz; qq++) pc_gamg->data[qq] = pc_gamg->orig_data[qq];
487     } else {
488       if (!pc_gamg->ops->createdefaultdata) SETERRQ(comm,PETSC_ERR_PLIB,"'createdefaultdata' not set(?) need to support NULL data");
489       ierr = pc_gamg->ops->createdefaultdata(pc,Pmat);CHKERRQ(ierr);
490     }
491   }
492 
493   /* cache original data for reuse */
494   if (!pc_gamg->orig_data && (PetscBool)(!pc_gamg->reuse_prol)) {
495     ierr = PetscMalloc1(pc_gamg->data_sz, &pc_gamg->orig_data);CHKERRQ(ierr);
496     for (qq=0; qq<pc_gamg->data_sz; qq++) pc_gamg->orig_data[qq] = pc_gamg->data[qq];
497     pc_gamg->orig_data_cell_rows = pc_gamg->data_cell_rows;
498     pc_gamg->orig_data_cell_cols = pc_gamg->data_cell_cols;
499   }
500 
501   /* get basic dims */
502   ierr = MatGetBlockSize(Pmat, &bs);CHKERRQ(ierr);
503   ierr = MatGetSize(Pmat, &M, &N);CHKERRQ(ierr);
504 
505   ierr = MatGetInfo(Pmat,MAT_GLOBAL_SUM,&info);CHKERRQ(ierr); /* global reduction */
506   nnz0   = info.nz_used;
507   nnztot = info.nz_used;
508   ierr = PetscInfo6(pc,"level %d) N=%D, n data rows=%d, n data cols=%d, nnz/row (ave)=%d, np=%d\n",0,M,pc_gamg->data_cell_rows,pc_gamg->data_cell_cols,(int)(nnz0/(PetscReal)M+0.5),size);CHKERRQ(ierr);
509 
510   /* Get A_i and R_i */
511   for (level=0, Aarr[0]=Pmat, nactivepe = size; level < (pc_gamg->Nlevels-1) && (!level || M>pc_gamg->coarse_eq_limit); level++) {
512     pc_gamg->current_level = level;
513     level1 = level + 1;
514 #if defined PETSC_GAMG_USE_LOG
515     ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET1],0,0,0,0);CHKERRQ(ierr);
516 #if (defined GAMG_STAGES)
517     ierr = PetscLogStagePush(gamg_stages[level]);CHKERRQ(ierr);
518 #endif
519 #endif
520     { /* construct prolongator */
521       Mat              Gmat;
522       PetscCoarsenData *agg_lists;
523       Mat              Prol11;
524 
525       ierr = pc_gamg->ops->graph(pc,Aarr[level], &Gmat);CHKERRQ(ierr);
526       ierr = pc_gamg->ops->coarsen(pc, &Gmat, &agg_lists);CHKERRQ(ierr);
527       ierr = pc_gamg->ops->prolongator(pc,Aarr[level],Gmat,agg_lists,&Prol11);CHKERRQ(ierr);
528 
529       /* could have failed to create new level */
530       if (Prol11) {
531         /* get new block size of coarse matrices */
532         ierr = MatGetBlockSizes(Prol11, NULL, &bs);CHKERRQ(ierr);
533 
534         if (pc_gamg->ops->optprolongator) {
535           /* smooth */
536           ierr = pc_gamg->ops->optprolongator(pc, Aarr[level], &Prol11);CHKERRQ(ierr);
537         }
538 
539         Parr[level1] = Prol11;
540       } else Parr[level1] = NULL; /* failed to coarsen */
541 
542       if (pc_gamg->use_aggs_in_asm && Prol11) { /* Prol11 may be NULL if coarsening failed */
543         PetscInt bs;
544         ierr = MatGetBlockSizes(Prol11, &bs, NULL);CHKERRQ(ierr);
545         ierr = PetscCDGetASMBlocks(agg_lists, bs, Gmat, &nASMBlocksArr[level], &ASMLocalIDsArr[level]);CHKERRQ(ierr);
546       }
547 
548       ierr = MatDestroy(&Gmat);CHKERRQ(ierr);
549       ierr = PetscCDDestroy(agg_lists);CHKERRQ(ierr);
550     } /* construct prolongator scope */
551 #if defined PETSC_GAMG_USE_LOG
552     ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET1],0,0,0,0);CHKERRQ(ierr);
553 #endif
554     if (!level) Aarr[0] = Pmat; /* use Pmat for finest level setup */
555     if (!Parr[level1]) { /* failed to coarsen */
556       ierr =  PetscInfo1(pc,"Stop gridding, level %D\n",level);CHKERRQ(ierr);
557 #if defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES
558       ierr = PetscLogStagePop();CHKERRQ(ierr);
559 #endif
560       break;
561     }
562 #if defined PETSC_GAMG_USE_LOG
563     ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET2],0,0,0,0);CHKERRQ(ierr);
564 #endif
565     ierr = MatGetSize(Parr[level1], &M, &N);CHKERRQ(ierr); /* N is the next M, a loop test variable */
566     if (is_last) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Internal error: the coarsest level was already created");
567     if (N <= pc_gamg->coarse_eq_limit) is_last = PETSC_TRUE;
568     ierr = pc_gamg->ops->createlevel(pc, Aarr[level], bs, &Parr[level1], &Aarr[level1], &nactivepe, NULL, is_last);CHKERRQ(ierr);
569 
570 #if defined PETSC_GAMG_USE_LOG
571     ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET2],0,0,0,0);CHKERRQ(ierr);
572 #endif
573     ierr = MatGetSize(Aarr[level1], &M, &N);CHKERRQ(ierr); /* M is the loop test variable */
574     ierr = MatGetInfo(Aarr[level1], MAT_GLOBAL_SUM, &info);CHKERRQ(ierr);
575     nnztot += info.nz_used;
576     ierr = PetscInfo5(pc,"%d) N=%D, n data cols=%d, nnz/row (ave)=%d, %d active pes\n",level1,M,pc_gamg->data_cell_cols,(int)(info.nz_used/(PetscReal)M),nactivepe);CHKERRQ(ierr);
577 
578 #if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES)
579     ierr = PetscLogStagePop();CHKERRQ(ierr);
580 #endif
581     /* stop if one node or one proc -- could pull back for singular problems */
582     if ( (pc_gamg->data_cell_cols && M/pc_gamg->data_cell_cols < 2) || (!pc_gamg->data_cell_cols && M/bs < 2) ) {
583       ierr =  PetscInfo2(pc,"HARD stop of coarsening on level %D.  Grid too small: %D block nodes\n",level,M/bs);CHKERRQ(ierr);
584       level++;
585       break;
586     }
587   } /* levels */
588   ierr = PetscFree(pc_gamg->data);CHKERRQ(ierr);
589 
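  /* the "grid complexity" reported below is the sum of nonzeros over all level operators
     divided by the fine grid's nonzeros; values near 1 mean the coarse levels are cheap */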
590   ierr = PetscInfo2(pc,"%D levels, grid complexity = %g\n",level+1,nnztot/nnz0);CHKERRQ(ierr);
591   pc_gamg->Nlevels = level + 1;
592   fine_level       = level;
593   ierr             = PCMGSetLevels(pc,pc_gamg->Nlevels,NULL);CHKERRQ(ierr);
594 
595   if (pc_gamg->Nlevels > 1) { /* don't setup MG if one level */
596     /* set default smoothers & set operators */
597     for (lidx = 1, level = pc_gamg->Nlevels-2; lidx <= fine_level; lidx++, level--) {
598       KSP smoother;
599       PC  subpc;
600 
601       ierr = PCMGGetSmoother(pc, lidx, &smoother);CHKERRQ(ierr);
602       ierr = KSPGetPC(smoother, &subpc);CHKERRQ(ierr);
603 
604       ierr = KSPSetNormType(smoother, KSP_NORM_NONE);CHKERRQ(ierr);
605       /* set ops */
606       ierr = KSPSetOperators(smoother, Aarr[level], Aarr[level]);CHKERRQ(ierr);
607       ierr = PCMGSetInterpolation(pc, lidx, Parr[level+1]);CHKERRQ(ierr);
608 
609       /* set defaults */
610       ierr = KSPSetType(smoother, KSPCHEBYSHEV);CHKERRQ(ierr);
611 
612       /* set blocks for ASM smoother that uses the 'aggregates' */
613       if (pc_gamg->use_aggs_in_asm) {
614         PetscInt sz;
615         IS       *iss;
616 
617         sz   = nASMBlocksArr[level];
618         iss  = ASMLocalIDsArr[level];
619         ierr = PCSetType(subpc, PCASM);CHKERRQ(ierr);
620         ierr = PCASMSetOverlap(subpc, 0);CHKERRQ(ierr);
621         ierr = PCASMSetType(subpc,PC_ASM_BASIC);CHKERRQ(ierr);
622         if (!sz) {
623           IS       is;
624           ierr = ISCreateGeneral(PETSC_COMM_SELF, 0, NULL, PETSC_COPY_VALUES, &is);CHKERRQ(ierr);
625           ierr = PCASMSetLocalSubdomains(subpc, 1, NULL, &is);CHKERRQ(ierr);
626           ierr = ISDestroy(&is);CHKERRQ(ierr);
627         } else {
628           PetscInt kk;
629           ierr = PCASMSetLocalSubdomains(subpc, sz, NULL, iss);CHKERRQ(ierr);
630           for (kk=0; kk<sz; kk++) {
631             ierr = ISDestroy(&iss[kk]);CHKERRQ(ierr);
632           }
633           ierr = PetscFree(iss);CHKERRQ(ierr);
634         }
635         ASMLocalIDsArr[level] = NULL;
636         nASMBlocksArr[level]  = 0;
637       } else {
638         ierr = PCSetType(subpc, PCSOR);CHKERRQ(ierr);
639       }
640     }
641     {
642       /* coarse grid */
643       KSP smoother,*k2; PC subpc,pc2; PetscInt ii,first;
644       Mat Lmat = Aarr[(level=pc_gamg->Nlevels-1)]; lidx = 0;
645       ierr = PCMGGetSmoother(pc, lidx, &smoother);CHKERRQ(ierr);
646       ierr = KSPSetOperators(smoother, Lmat, Lmat);CHKERRQ(ierr);
647       if (!pc_gamg->use_parallel_coarse_grid_solver) {
648         ierr = KSPSetNormType(smoother, KSP_NORM_NONE);CHKERRQ(ierr);
649         ierr = KSPGetPC(smoother, &subpc);CHKERRQ(ierr);
650         ierr = PCSetType(subpc, PCBJACOBI);CHKERRQ(ierr);
651         ierr = PCSetUp(subpc);CHKERRQ(ierr);
652         ierr = PCBJacobiGetSubKSP(subpc,&ii,&first,&k2);CHKERRQ(ierr);
653         if (ii != 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"ii %D is not one",ii);
654         ierr = KSPGetPC(k2[0],&pc2);CHKERRQ(ierr);
655         ierr = PCSetType(pc2, PCLU);CHKERRQ(ierr);
656         ierr = PCFactorSetShiftType(pc2,MAT_SHIFT_INBLOCKS);CHKERRQ(ierr);
657         ierr = KSPSetTolerances(k2[0],PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,1);CHKERRQ(ierr);
658         ierr = KSPSetType(k2[0], KSPPREONLY);CHKERRQ(ierr);
659         /* This flag gets reset by PCBJacobiGetSubKSP(), but our BJacobi really does the same algorithm everywhere (and in
660          * fact, all but one process will have zero dofs), so we reset the flag to avoid having PCView_BJacobi attempt to
661          * view every subdomain as though they were different. */
662         ((PC_BJacobi*)subpc->data)->same_local_solves = PETSC_TRUE;
663       }
664     }
665 
666     /* should be called in PCSetFromOptions_GAMG(), but cannot be called prior to PCMGSetLevels() */
667     ierr = PetscObjectOptionsBegin((PetscObject)pc);CHKERRQ(ierr);
668     ierr = PCSetFromOptions_MG(PetscOptionsObject,pc);CHKERRQ(ierr);
669     ierr = PetscOptionsEnd();CHKERRQ(ierr);
670     ierr = PCMGSetGalerkin(pc,PC_MG_GALERKIN_EXTERNAL);CHKERRQ(ierr);
671 
672     /* clean up */
673     for (level=1; level<pc_gamg->Nlevels; level++) {
674       ierr = MatDestroy(&Parr[level]);CHKERRQ(ierr);
675       ierr = MatDestroy(&Aarr[level]);CHKERRQ(ierr);
676     }
677     ierr = PCSetUp_MG(pc);CHKERRQ(ierr);
678   } else {
679     KSP smoother;
680     ierr = PetscInfo(pc,"One level solver used (system is seen as DD). Using default solver.\n");CHKERRQ(ierr);
681     ierr = PCMGGetSmoother(pc, 0, &smoother);CHKERRQ(ierr);
682     ierr = KSPSetOperators(smoother, Aarr[0], Aarr[0]);CHKERRQ(ierr);
683     ierr = KSPSetType(smoother, KSPPREONLY);CHKERRQ(ierr);
684     ierr = PCSetUp_MG(pc);CHKERRQ(ierr);
685   }
686   PetscFunctionReturn(0);
687 }
688 
689 /* ------------------------------------------------------------------------- */
690 /*
691  PCDestroy_GAMG - Destroys the private context for the GAMG preconditioner
692    that was created with PCCreate_GAMG().
693 
694    Input Parameter:
695 .  pc - the preconditioner context
696 
697    Application Interface Routine: PCDestroy()
698 */
699 #undef __FUNCT__
700 #define __FUNCT__ "PCDestroy_GAMG"
701 PetscErrorCode PCDestroy_GAMG(PC pc)
702 {
703   PetscErrorCode ierr;
704   PC_MG          *mg     = (PC_MG*)pc->data;
705   PC_GAMG        *pc_gamg= (PC_GAMG*)mg->innerctx;
706 
707   PetscFunctionBegin;
708   ierr = PCReset_GAMG(pc);CHKERRQ(ierr);
709   if (pc_gamg->ops->destroy) {
710     ierr = (*pc_gamg->ops->destroy)(pc);CHKERRQ(ierr);
711   }
712   ierr = PetscRandomDestroy(&pc_gamg->random);CHKERRQ(ierr);
713   ierr = PetscFree(pc_gamg->ops);CHKERRQ(ierr);
714   ierr = PetscFree(pc_gamg->gamg_type_name);CHKERRQ(ierr);
715   ierr = PetscFree(pc_gamg);CHKERRQ(ierr);
716   ierr = PCDestroy_MG(pc);CHKERRQ(ierr);
717   PetscFunctionReturn(0);
718 }
719 
720 #undef __FUNCT__
721 #define __FUNCT__ "PCGAMGSetProcEqLim"
722 /*@
723    PCGAMGSetProcEqLim - Set number of equations to aim for per process on the coarse grids via processor reduction.
724 
725    Logically Collective on PC
726 
727    Input Parameters:
728 +  pc - the preconditioner context
729 -  n - the number of equations
730 
731 
732    Options Database Key:
733 .  -pc_gamg_process_eq_limit <limit>
734 
735    Notes: GAMG will reduce the number of MPI processes used directly on the coarse grids so that there are around <limit> equations on each process
736           that has degrees of freedom
737 
738    Level: intermediate
739 
740    Concepts: Unstructured multigrid preconditioner
741 
742 .seealso: PCGAMGSetCoarseEqLim()
743 @*/
744 PetscErrorCode  PCGAMGSetProcEqLim(PC pc, PetscInt n)
745 {
746   PetscErrorCode ierr;
747 
748   PetscFunctionBegin;
749   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
750   ierr = PetscTryMethod(pc,"PCGAMGSetProcEqLim_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr);
751   PetscFunctionReturn(0);
752 }
753 
754 #undef __FUNCT__
755 #define __FUNCT__ "PCGAMGSetProcEqLim_GAMG"
756 static PetscErrorCode PCGAMGSetProcEqLim_GAMG(PC pc, PetscInt n)
757 {
758   PC_MG   *mg      = (PC_MG*)pc->data;
759   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
760 
761   PetscFunctionBegin;
762   if (n>0) pc_gamg->min_eq_proc = n;
763   PetscFunctionReturn(0);
764 }
765 
766 #undef __FUNCT__
767 #define __FUNCT__ "PCGAMGSetCoarseEqLim"
768 /*@
769    PCGAMGSetCoarseEqLim - Set maximum number of equations on coarsest grid.
770 
771  Collective on PC
772 
773    Input Parameters:
774 +  pc - the preconditioner context
775 -  n - maximum number of equations to aim for
776 
777    Options Database Key:
778 .  -pc_gamg_coarse_eq_limit <limit>
779 
780    Level: intermediate
781 
782    Concepts: Unstructured multigrid preconditioner
783 
784 .seealso: PCGAMGSetProcEqLim()
785 @*/
786 PetscErrorCode PCGAMGSetCoarseEqLim(PC pc, PetscInt n)
787 {
788   PetscErrorCode ierr;
789 
790   PetscFunctionBegin;
791   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
792   ierr = PetscTryMethod(pc,"PCGAMGSetCoarseEqLim_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr);
793   PetscFunctionReturn(0);
794 }
795 
796 #undef __FUNCT__
797 #define __FUNCT__ "PCGAMGSetCoarseEqLim_GAMG"
798 static PetscErrorCode PCGAMGSetCoarseEqLim_GAMG(PC pc, PetscInt n)
799 {
800   PC_MG   *mg      = (PC_MG*)pc->data;
801   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
802 
803   PetscFunctionBegin;
804   if (n>0) pc_gamg->coarse_eq_limit = n;
805   PetscFunctionReturn(0);
806 }
807 
808 #undef __FUNCT__
809 #define __FUNCT__ "PCGAMGSetRepartition"
810 /*@
811    PCGAMGSetRepartition - Repartition the degrees of freedom across the processors on the coarser grids
812 
813    Collective on PC
814 
815    Input Parameters:
816 +  pc - the preconditioner context
817 -  n - PETSC_TRUE or PETSC_FALSE
818 
819    Options Database Key:
820 .  -pc_gamg_repartition <true,false>
821 
822    Notes: this will generally improve the load balancing of the work on each level
823 
824    Level: intermediate
825 
826    Concepts: Unstructured multigrid preconditioner
827 
828 .seealso: ()
829 @*/
830 PetscErrorCode PCGAMGSetRepartition(PC pc, PetscBool n)
831 {
832   PetscErrorCode ierr;
833 
834   PetscFunctionBegin;
835   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
836   ierr = PetscTryMethod(pc,"PCGAMGSetRepartition_C",(PC,PetscBool),(pc,n));CHKERRQ(ierr);
837   PetscFunctionReturn(0);
838 }
839 
840 #undef __FUNCT__
841 #define __FUNCT__ "PCGAMGSetRepartition_GAMG"
842 static PetscErrorCode PCGAMGSetRepartition_GAMG(PC pc, PetscBool n)
843 {
844   PC_MG   *mg      = (PC_MG*)pc->data;
845   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
846 
847   PetscFunctionBegin;
848   pc_gamg->repart = n;
849   PetscFunctionReturn(0);
850 }
851 
852 #undef __FUNCT__
853 #define __FUNCT__ "PCGAMGSetReuseInterpolation"
854 /*@
855    PCGAMGSetReuseInterpolation - Reuse prolongation when rebuilding algebraic multigrid preconditioner
856 
857    Collective on PC
858 
859    Input Parameters:
860 +  pc - the preconditioner context
861 -  n - PETSC_TRUE or PETSC_FALSE
862 
863    Options Database Key:
864 .  -pc_gamg_reuse_interpolation <true,false>
865 
866    Level: intermediate
867 
868    Notes: this may negatively affect the convergence rate of the method on new matrices if the matrix entries change a great deal, but it allows
869           rebuilding the preconditioner more quickly.
870 
871    Concepts: Unstructured multigrid preconditioner
872 
873 .seealso: ()
874 @*/
875 PetscErrorCode PCGAMGSetReuseInterpolation(PC pc, PetscBool n)
876 {
877   PetscErrorCode ierr;
878 
879   PetscFunctionBegin;
880   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
881   ierr = PetscTryMethod(pc,"PCGAMGSetReuseInterpolation_C",(PC,PetscBool),(pc,n));CHKERRQ(ierr);
882   PetscFunctionReturn(0);
883 }
884 
885 #undef __FUNCT__
886 #define __FUNCT__ "PCGAMGSetReuseInterpolation_GAMG"
887 static PetscErrorCode PCGAMGSetReuseInterpolation_GAMG(PC pc, PetscBool n)
888 {
889   PC_MG   *mg      = (PC_MG*)pc->data;
890   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
891 
892   PetscFunctionBegin;
893   pc_gamg->reuse_prol = n;
894   PetscFunctionReturn(0);
895 }
896 
897 #undef __FUNCT__
898 #define __FUNCT__ "PCGAMGASMSetUseAggs"
899 /*@
900    PCGAMGASMSetUseAggs - Have the PCGAMG smoother on each level use the aggregates defined by the coarsening process as the subdomains for the additive Schwarz preconditioner.
901 
902    Collective on PC
903 
904    Input Parameters:
905 +  pc - the preconditioner context
906 -  flg - PETSC_TRUE to use aggregates, PETSC_FALSE to not
907 
908    Options Database Key:
909 .  -pc_gamg_asm_use_agg
910 
911    Level: intermediate
912 
913    Concepts: Unstructured multigrid preconditioner
914 
915 .seealso: ()
916 @*/
917 PetscErrorCode PCGAMGASMSetUseAggs(PC pc, PetscBool flg)
918 {
919   PetscErrorCode ierr;
920 
921   PetscFunctionBegin;
922   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
923   ierr = PetscTryMethod(pc,"PCGAMGASMSetUseAggs_C",(PC,PetscBool),(pc,flg));CHKERRQ(ierr);
924   PetscFunctionReturn(0);
925 }
926 
927 #undef __FUNCT__
928 #define __FUNCT__ "PCGAMGASMSetUseAggs_GAMG"
929 static PetscErrorCode PCGAMGASMSetUseAggs_GAMG(PC pc, PetscBool flg)
930 {
931   PC_MG   *mg      = (PC_MG*)pc->data;
932   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
933 
934   PetscFunctionBegin;
935   pc_gamg->use_aggs_in_asm = flg;
936   PetscFunctionReturn(0);
937 }
938 
939 #undef __FUNCT__
940 #define __FUNCT__ "PCGAMGSetUseParallelCoarseGridSolve"
941 /*@
942    PCGAMGSetUseParallelCoarseGridSolve - allow a parallel coarse grid solver
943 
944    Collective on PC
945 
946    Input Parameters:
947 +  pc - the preconditioner context
948 -  flg - PETSC_TRUE to not force coarse grid onto one processor
949 
950    Options Database Key:
951 .  -pc_gamg_use_parallel_coarse_grid_solver
952 
953    Level: intermediate
954 
955    Concepts: Unstructured multigrid preconditioner
956 
957 .seealso: ()
958 @*/
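/*
   A usage sketch (illustrative): keep the coarsest solve distributed and choose a parallel
   solver for it through the PCMG coarse-level prefix, e.g.

       -pc_gamg_use_parallel_coarse_grid_solver -mg_coarse_ksp_type cg -mg_coarse_pc_type jacobi
*/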
959 PetscErrorCode PCGAMGSetUseParallelCoarseGridSolve(PC pc, PetscBool flg)
960 {
961   PetscErrorCode ierr;
962 
963   PetscFunctionBegin;
964   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
965   ierr = PetscTryMethod(pc,"PCGAMGSetUseParallelCoarseGridSolve_C",(PC,PetscBool),(pc,flg));CHKERRQ(ierr);
966   PetscFunctionReturn(0);
967 }
968 
969 #undef __FUNCT__
970 #define __FUNCT__ "PCGAMGSetUseParallelCoarseGridSolve_GAMG"
971 static PetscErrorCode PCGAMGSetUseParallelCoarseGridSolve_GAMG(PC pc, PetscBool flg)
972 {
973   PC_MG   *mg      = (PC_MG*)pc->data;
974   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
975 
976   PetscFunctionBegin;
977   pc_gamg->use_parallel_coarse_grid_solver = flg;
978   PetscFunctionReturn(0);
979 }
980 
981 #undef __FUNCT__
982 #define __FUNCT__ "PCGAMGSetNlevels"
983 /*@
984    PCGAMGSetNlevels -  Sets the maximum number of levels PCGAMG will use
985 
986    Not collective on PC
987 
988    Input Parameters:
989 +  pc - the preconditioner
990 -  n - the maximum number of levels to use
991 
992    Options Database Key:
993 .  -pc_mg_levels
994 
995    Level: intermediate
996 
997    Concepts: Unstructured multigrid preconditioner
998 
999 .seealso: ()
1000 @*/
1001 PetscErrorCode PCGAMGSetNlevels(PC pc, PetscInt n)
1002 {
1003   PetscErrorCode ierr;
1004 
1005   PetscFunctionBegin;
1006   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1007   ierr = PetscTryMethod(pc,"PCGAMGSetNlevels_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr);
1008   PetscFunctionReturn(0);
1009 }
1010 
1011 #undef __FUNCT__
1012 #define __FUNCT__ "PCGAMGSetNlevels_GAMG"
1013 static PetscErrorCode PCGAMGSetNlevels_GAMG(PC pc, PetscInt n)
1014 {
1015   PC_MG   *mg      = (PC_MG*)pc->data;
1016   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
1017 
1018   PetscFunctionBegin;
1019   pc_gamg->Nlevels = n;
1020   PetscFunctionReturn(0);
1021 }
1022 
1023 #undef __FUNCT__
1024 #define __FUNCT__ "PCGAMGSetThreshold"
1025 /*@
1026    PCGAMGSetThreshold - Relative threshold to use for dropping edges in aggregation graph
1027 
1028    Not collective on PC
1029 
1030    Input Parameters:
1031 +  pc - the preconditioner context
1032 -  threshold - the threshold value, 0.0 means keep all nonzero entries in the graph; negative means keep even zero entries in the graph
1033 
1034    Options Database Key:
1035 .  -pc_gamg_threshold <threshold>
1036 
1037    Notes: Before aggregating the graph, GAMG removes small values from it, thus reducing the coupling in the graph and producing a different
1038     (perhaps better) coarse set of points.
1039 
1040    Level: intermediate
1041 
1042    Concepts: Unstructured multigrid preconditioner
1043 
1044 .seealso: ()
1045 @*/
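/*
   A usage sketch (illustrative, assuming 'pc' has already been set to PCGAMG):

       ierr = PCGAMGSetThreshold(pc, 0.01);CHKERRQ(ierr);

   or, equivalently, the runtime option -pc_gamg_threshold 0.01
*/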
1046 PetscErrorCode PCGAMGSetThreshold(PC pc, PetscReal n)
1047 {
1048   PetscErrorCode ierr;
1049 
1050   PetscFunctionBegin;
1051   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1052   ierr = PetscTryMethod(pc,"PCGAMGSetThreshold_C",(PC,PetscReal),(pc,n));CHKERRQ(ierr);
1053   PetscFunctionReturn(0);
1054 }
1055 
1056 #undef __FUNCT__
1057 #define __FUNCT__ "PCGAMGSetThreshold_GAMG"
1058 static PetscErrorCode PCGAMGSetThreshold_GAMG(PC pc, PetscReal n)
1059 {
1060   PC_MG   *mg      = (PC_MG*)pc->data;
1061   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
1062 
1063   PetscFunctionBegin;
1064   pc_gamg->threshold = n;
1065   PetscFunctionReturn(0);
1066 }
1067 
1068 #undef __FUNCT__
1069 #define __FUNCT__ "PCGAMGSetType"
1070 /*@
1071    PCGAMGSetType - Set solution method
1072 
1073    Collective on PC
1074 
1075    Input Parameters:
1076 +  pc - the preconditioner context
1077 -  type - PCGAMGAGG, PCGAMGGEO, or PCGAMGCLASSICAL
1078 
1079    Options Database Key:
1080 .  -pc_gamg_type <agg,geo,classical> - type of algebraic multigrid to apply
1081 
1082    Level: intermediate
1083 
1084    Concepts: Unstructured multigrid preconditioner
1085 
1086 .seealso: PCGAMGGetType(), PCGAMG, PCGAMGType
1087 @*/
1088 PetscErrorCode PCGAMGSetType(PC pc, PCGAMGType type)
1089 {
1090   PetscErrorCode ierr;
1091 
1092   PetscFunctionBegin;
1093   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1094   ierr = PetscTryMethod(pc,"PCGAMGSetType_C",(PC,PCGAMGType),(pc,type));CHKERRQ(ierr);
1095   PetscFunctionReturn(0);
1096 }
1097 
1098 #undef __FUNCT__
1099 #define __FUNCT__ "PCGAMGGetType"
1100 /*@
1101    PCGAMGGetType - Get solution method
1102 
1103    Collective on PC
1104 
1105    Input Parameter:
1106 .  pc - the preconditioner context
1107 
1108    Output Parameter:
1109 .  type - the type of algorithm used
1110 
1111    Level: intermediate
1112 
1113    Concepts: Unstructured multigrid preconditioner
1114 
1115 .seealso: PCGAMGSetType(), PCGAMGType
1116 @*/
1117 PetscErrorCode PCGAMGGetType(PC pc, PCGAMGType *type)
1118 {
1119   PetscErrorCode ierr;
1120 
1121   PetscFunctionBegin;
1122   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1123   ierr = PetscUseMethod(pc,"PCGAMGGetType_C",(PC,PCGAMGType*),(pc,type));CHKERRQ(ierr);
1124   PetscFunctionReturn(0);
1125 }
1126 
1127 #undef __FUNCT__
1128 #define __FUNCT__ "PCGAMGGetType_GAMG"
1129 static PetscErrorCode PCGAMGGetType_GAMG(PC pc, PCGAMGType *type)
1130 {
1131   PC_MG          *mg      = (PC_MG*)pc->data;
1132   PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
1133 
1134   PetscFunctionBegin;
1135   *type = pc_gamg->type;
1136   PetscFunctionReturn(0);
1137 }
1138 
1139 #undef __FUNCT__
1140 #define __FUNCT__ "PCGAMGSetType_GAMG"
1141 static PetscErrorCode PCGAMGSetType_GAMG(PC pc, PCGAMGType type)
1142 {
1143   PetscErrorCode ierr,(*r)(PC);
1144   PC_MG          *mg      = (PC_MG*)pc->data;
1145   PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
1146 
1147   PetscFunctionBegin;
1148   pc_gamg->type = type;
1149   ierr = PetscFunctionListFind(GAMGList,type,&r);CHKERRQ(ierr);
1150   if (!r) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_UNKNOWN_TYPE,"Unknown GAMG type %s given",type);
1151   if (pc_gamg->ops->destroy) {
1152     ierr = (*pc_gamg->ops->destroy)(pc);CHKERRQ(ierr);
1153     ierr = PetscMemzero(pc_gamg->ops,sizeof(struct _PCGAMGOps));CHKERRQ(ierr);
1154     pc_gamg->ops->createlevel = PCGAMGCreateLevel_GAMG;
1155     /* cleaning up common data in pc_gamg - this should disappear someday */
1156     pc_gamg->data_cell_cols = 0;
1157     pc_gamg->data_cell_rows = 0;
1158     pc_gamg->orig_data_cell_cols = 0;
1159     pc_gamg->orig_data_cell_rows = 0;
1160     ierr = PetscFree(pc_gamg->data);CHKERRQ(ierr);
1161     pc_gamg->data_sz = 0;
1162   }
1163   ierr = PetscFree(pc_gamg->gamg_type_name);CHKERRQ(ierr);
1164   ierr = PetscStrallocpy(type,&pc_gamg->gamg_type_name);CHKERRQ(ierr);
1165   ierr = (*r)(pc);CHKERRQ(ierr);
1166   PetscFunctionReturn(0);
1167 }
1168 
1169 #undef __FUNCT__
1170 #define __FUNCT__ "PCView_GAMG"
1171 static PetscErrorCode PCView_GAMG(PC pc,PetscViewer viewer)
1172 {
1173   PetscErrorCode ierr;
1174   PC_MG          *mg      = (PC_MG*)pc->data;
1175   PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
1176 
1177   PetscFunctionBegin;
1178   ierr = PetscViewerASCIIPrintf(viewer,"    GAMG specific options\n");CHKERRQ(ierr);
1179   ierr = PetscViewerASCIIPrintf(viewer,"      Threshold for dropping small values from graph %g\n",(double)pc_gamg->threshold);CHKERRQ(ierr);
1180   if (pc_gamg->use_aggs_in_asm) {
1181     ierr = PetscViewerASCIIPrintf(viewer,"      Using aggregates from coarsening process to define subdomains for PCASM\n");CHKERRQ(ierr);
1182   }
1183   if (pc_gamg->use_parallel_coarse_grid_solver) {
1184     ierr = PetscViewerASCIIPrintf(viewer,"      Using parallel coarse grid solver (all coarse grid equations not put on one process)\n");CHKERRQ(ierr);
1185   }
1186   if (pc_gamg->ops->view) {
1187     ierr = (*pc_gamg->ops->view)(pc,viewer);CHKERRQ(ierr);
1188   }
1189   PetscFunctionReturn(0);
1190 }
1191 
1192 #undef __FUNCT__
1193 #define __FUNCT__ "PCSetFromOptions_GAMG"
1194 PetscErrorCode PCSetFromOptions_GAMG(PetscOptionItems *PetscOptionsObject,PC pc)
1195 {
1196   PetscErrorCode ierr;
1197   PC_MG          *mg      = (PC_MG*)pc->data;
1198   PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
1199   PetscBool      flag;
1200   MPI_Comm       comm;
1201   char           prefix[256];
1202   const char     *pcpre;
1203 
1204   PetscFunctionBegin;
1205   ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
1206   ierr = PetscOptionsHead(PetscOptionsObject,"GAMG options");CHKERRQ(ierr);
1207   {
1208     char tname[256];
1209     ierr = PetscOptionsFList("-pc_gamg_type","Type of AMG method","PCGAMGSetType",GAMGList, pc_gamg->gamg_type_name, tname, sizeof(tname), &flag);CHKERRQ(ierr);
1210     if (flag) {
1211       ierr = PCGAMGSetType(pc,tname);CHKERRQ(ierr);
1212     }
1213     ierr = PetscOptionsBool("-pc_gamg_repartition","Repartition coarse grids","PCGAMGSetRepartition",pc_gamg->repart,&pc_gamg->repart,NULL);CHKERRQ(ierr);
1214     ierr = PetscOptionsBool("-pc_gamg_reuse_interpolation","Reuse prolongation operator","PCGAMGSetReuseInterpolation",pc_gamg->reuse_prol,&pc_gamg->reuse_prol,NULL);CHKERRQ(ierr);
1215     ierr = PetscOptionsBool("-pc_gamg_asm_use_agg","Use aggregates from the coarsening process for the ASM smoother","PCGAMGASMSetUseAggs",pc_gamg->use_aggs_in_asm,&pc_gamg->use_aggs_in_asm,NULL);CHKERRQ(ierr);
1216     ierr = PetscOptionsBool("-pc_gamg_use_parallel_coarse_grid_solver","Use parallel coarse grid solver (otherwise put last grid on one process)","PCGAMGSetUseParallelCoarseGridSolve",pc_gamg->use_parallel_coarse_grid_solver,&pc_gamg->use_parallel_coarse_grid_solver,NULL);CHKERRQ(ierr);
1217     ierr = PetscOptionsInt("-pc_gamg_process_eq_limit","Limit (goal) on number of equations per process on coarse grids","PCGAMGSetProcEqLim",pc_gamg->min_eq_proc,&pc_gamg->min_eq_proc,NULL);CHKERRQ(ierr);
1218     ierr = PetscOptionsInt("-pc_gamg_coarse_eq_limit","Limit on number of equations for the coarse grid","PCGAMGSetCoarseEqLim",pc_gamg->coarse_eq_limit,&pc_gamg->coarse_eq_limit,NULL);CHKERRQ(ierr);
1219     ierr = PetscOptionsReal("-pc_gamg_threshold","Relative threshold to use for dropping edges in aggregation graph","PCGAMGSetThreshold",pc_gamg->threshold,&pc_gamg->threshold,&flag);CHKERRQ(ierr);
1220     ierr = PetscOptionsInt("-pc_mg_levels","Set number of MG levels","PCGAMGSetNlevels",pc_gamg->Nlevels,&pc_gamg->Nlevels,NULL);CHKERRQ(ierr);
1221 
1222     /* set options for subtype */
1223     if (pc_gamg->ops->setfromoptions) {ierr = (*pc_gamg->ops->setfromoptions)(PetscOptionsObject,pc);CHKERRQ(ierr);}
1224   }
1225   ierr = PCGetOptionsPrefix(pc, &pcpre);CHKERRQ(ierr);
1226   ierr = PetscSNPrintf(prefix,sizeof(prefix),"%spc_gamg_",pcpre ? pcpre : "");CHKERRQ(ierr);
1227   ierr = PetscObjectSetOptionsPrefix((PetscObject)pc_gamg->random,prefix);CHKERRQ(ierr);
1228   ierr = PetscRandomSetFromOptions(pc_gamg->random);CHKERRQ(ierr);
1229   ierr = PetscOptionsTail();CHKERRQ(ierr);
1230   PetscFunctionReturn(0);
1231 }
1232 
1233 /* -------------------------------------------------------------------------- */
1234 /*MC
1235      PCGAMG - Geometric algebraic multigrid (AMG) preconditioner
1236 
1237    Options Database Keys:
1238 +   -pc_gamg_type <type> - one of agg, geo, or classical
1239 .   -pc_gamg_repartition  <true,default=false> - repartition the degrees of freedom across the coarse grids as they are determined
1240 .   -pc_gamg_reuse_interpolation <true,default=false> - when rebuilding the algebraic multigrid preconditioner reuse the previously computed interpolations
1241 .   -pc_gamg_asm_use_agg <true,default=false> - use the aggregates from the coarsening process to define the subdomains on each level for the PCASM smoother
1242 .   -pc_gamg_process_eq_limit <limit, default=50> - GAMG will reduce the number of MPI processes used directly on the coarse grids so that there are around <limit>
1243                                         equations on each process that has degrees of freedom
1244 .   -pc_gamg_coarse_eq_limit <limit, default=50> - Set maximum number of equations on coarsest grid to aim for.
1245 -   -pc_gamg_threshold <thresh,default=0> - Before aggregating the graph, GAMG removes small values from it, thus reducing the coupling in the graph and producing a different (perhaps better) coarse set of points
1246 
1247    Options Database Keys for default Aggregation:
1248 +  -pc_gamg_agg_nsmooths <nsmooth, default=1> - number of smoothing steps to use with smoothed aggregation
1249 .  -pc_gamg_sym_graph <true,default=false> - symmetrize the graph before computing the aggregation
1250 -  -pc_gamg_square_graph <n,default=1> - number of levels to square the graph before aggregating it
1251 
1252    Multigrid options(inherited):
1253 +  -pc_mg_cycles <v>: v or w (PCMGSetCycleType())
1254 .  -pc_mg_smoothup <1>: Number of post-smoothing steps (PCMGSetNumberSmoothUp())
1255 .  -pc_mg_smoothdown <1>: Number of pre-smoothing steps (PCMGSetNumberSmoothDown())
1256 .  -pc_mg_type <multiplicative>: (one of) additive multiplicative full kaskade
1257 -  -pc_mg_levels <levels> - Number of levels of multigrid to use.
1258 
1259 
1260   Notes: In order to obtain good performance for PCGAMG for vector valued problems you must
1261 $       Call MatSetBlockSize() to indicate the number of degrees of freedom per grid point
1262 $       Call MatSetNearNullSpace() (or PCSetCoordinates() if solving the equations of elasticity) to indicate the near null space of the operator
1263 $       See the Users Manual Chapter 4 for more details
1264 
1265   Level: intermediate
1266 
1267   Concepts: algebraic multigrid
1268 
1269 .seealso:  PCCreate(), PCSetType(), MatSetBlockSize(), PCMGType, PCSetCoordinates(), MatSetNearNullSpace(), PCGAMGSetType(), PCGAMGAGG, PCGAMGGEO, PCGAMGCLASSICAL, PCGAMGSetProcEqLim(),
1270            PCGAMGSetCoarseEqLim(), PCGAMGSetRepartition(), PCGAMGRegister(), PCGAMGSetReuseInterpolation(), PCGAMGASMSetUseAggs(), PCGAMGSetUseParallelCoarseGridSolve(), PCGAMGSetNlevels(), PCGAMGSetThreshold(), PCGAMGGetType(), PCGAMGSetReuseInterpolation()
1271 M*/
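/*
   A minimal usage sketch tying the notes above together (illustrative only; 'A' is assumed
   to be an assembled operator with 3 dof per grid point and 'coords' a Vec of nodal
   coordinates -- neither is defined in this file):

       KSP          ksp;
       PC           pc;
       MatNullSpace nsp;

       ierr = MatSetBlockSize(A,3);CHKERRQ(ierr);                      <- dof per grid point
       ierr = MatNullSpaceCreateRigidBody(coords,&nsp);CHKERRQ(ierr);  <- near null space for elasticity
       ierr = MatSetNearNullSpace(A,nsp);CHKERRQ(ierr);
       ierr = MatNullSpaceDestroy(&nsp);CHKERRQ(ierr);
       ierr = KSPCreate(PETSC_COMM_WORLD,&ksp);CHKERRQ(ierr);
       ierr = KSPSetOperators(ksp,A,A);CHKERRQ(ierr);
       ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
       ierr = PCSetType(pc,PCGAMG);CHKERRQ(ierr);
       ierr = KSPSetFromOptions(ksp);CHKERRQ(ierr);
*/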
1272 
1273 #undef __FUNCT__
1274 #define __FUNCT__ "PCCreate_GAMG"
1275 PETSC_EXTERN PetscErrorCode PCCreate_GAMG(PC pc)
1276 {
1277   PetscErrorCode ierr;
1278   PC_GAMG        *pc_gamg;
1279   PC_MG          *mg;
1280 
1281   PetscFunctionBegin;
1282   /* register AMG type */
1283   ierr = PCGAMGInitializePackage();CHKERRQ(ierr);
1284 
1285   /* PCGAMG is an inherited class of PCMG. Initialize pc as PCMG */
1286   ierr = PCSetType(pc, PCMG);CHKERRQ(ierr);
1287   ierr = PetscObjectChangeTypeName((PetscObject)pc, PCGAMG);CHKERRQ(ierr);
1288 
1289   /* create a supporting struct and attach it to pc */
1290   ierr         = PetscNewLog(pc,&pc_gamg);CHKERRQ(ierr);
1291   ierr         = PCMGSetGalerkin(pc,PC_MG_GALERKIN_EXTERNAL);CHKERRQ(ierr);
1292   mg           = (PC_MG*)pc->data;
1293   mg->innerctx = pc_gamg;
1294 
1295   ierr = PetscNewLog(pc,&pc_gamg->ops);CHKERRQ(ierr);
1296 
1297   pc_gamg->setup_count = 0;
1298   /* these should be in subctx but repartitioning needs simple arrays */
1299   pc_gamg->data_sz = 0;
1300   pc_gamg->data    = 0;
1301 
1302   /* overwrite the pointers of PCMG with the functions of the derived class PCGAMG */
1303   pc->ops->setfromoptions = PCSetFromOptions_GAMG;
1304   pc->ops->setup          = PCSetUp_GAMG;
1305   pc->ops->reset          = PCReset_GAMG;
1306   pc->ops->destroy        = PCDestroy_GAMG;
1307   mg->view                = PCView_GAMG;
1308 
1309   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetProcEqLim_C",PCGAMGSetProcEqLim_GAMG);CHKERRQ(ierr);
1310   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetCoarseEqLim_C",PCGAMGSetCoarseEqLim_GAMG);CHKERRQ(ierr);
1311   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetRepartition_C",PCGAMGSetRepartition_GAMG);CHKERRQ(ierr);
1312   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetReuseInterpolation_C",PCGAMGSetReuseInterpolation_GAMG);CHKERRQ(ierr);
1313   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGASMSetUseAggs_C",PCGAMGASMSetUseAggs_GAMG);CHKERRQ(ierr);
1314   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetUseParallelCoarseGridSolve_C",PCGAMGSetUseParallelCoarseGridSolve_GAMG);CHKERRQ(ierr);
1315   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetThreshold_C",PCGAMGSetThreshold_GAMG);CHKERRQ(ierr);
1316   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetType_C",PCGAMGSetType_GAMG);CHKERRQ(ierr);
1317   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGGetType_C",PCGAMGGetType_GAMG);CHKERRQ(ierr);
1318   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetNlevels_C",PCGAMGSetNlevels_GAMG);CHKERRQ(ierr);
1319   pc_gamg->repart           = PETSC_FALSE;
1320   pc_gamg->reuse_prol       = PETSC_FALSE;
1321   pc_gamg->use_aggs_in_asm  = PETSC_FALSE;
1322   pc_gamg->use_parallel_coarse_grid_solver = PETSC_FALSE;
1323   pc_gamg->min_eq_proc      = 50;
1324   pc_gamg->coarse_eq_limit  = 50;
1325   pc_gamg->threshold        = 0.;
1326   pc_gamg->Nlevels          = GAMG_MAXLEVELS;
1327   pc_gamg->current_level    = 0; /* don't need to init really */
1328   pc_gamg->ops->createlevel = PCGAMGCreateLevel_GAMG;
1329 
1330   ierr = PetscRandomCreate(PetscObjectComm((PetscObject)pc),&pc_gamg->random);CHKERRQ(ierr);
1331 
1332   /* PCSetUp_GAMG assumes that the type has been set, so set it to the default now */
1333   ierr = PCGAMGSetType(pc,PCGAMGAGG);CHKERRQ(ierr);
1334   PetscFunctionReturn(0);
1335 }
1336 
1337 #undef __FUNCT__
1338 #define __FUNCT__ "PCGAMGInitializePackage"
1339 /*@C
1340  PCGAMGInitializePackage - This function initializes everything in the PCGAMG package. It is called
1341     from PetscDLLibraryRegister() when using dynamic libraries, and on the first call to PCCreate_GAMG()
1342     when using static libraries.
1343 
1344  Level: developer
1345 
1346  .keywords: PC, PCGAMG, initialize, package
1347  .seealso: PetscInitialize()
1348 @*/
1349 PetscErrorCode PCGAMGInitializePackage(void)
1350 {
1351   PetscErrorCode ierr;
1352 
1353   PetscFunctionBegin;
1354   if (PCGAMGPackageInitialized) PetscFunctionReturn(0);
1355   PCGAMGPackageInitialized = PETSC_TRUE;
1356   ierr = PetscFunctionListAdd(&GAMGList,PCGAMGGEO,PCCreateGAMG_GEO);CHKERRQ(ierr);
1357   ierr = PetscFunctionListAdd(&GAMGList,PCGAMGAGG,PCCreateGAMG_AGG);CHKERRQ(ierr);
1358   ierr = PetscFunctionListAdd(&GAMGList,PCGAMGCLASSICAL,PCCreateGAMG_Classical);CHKERRQ(ierr);
1359   ierr = PetscRegisterFinalize(PCGAMGFinalizePackage);CHKERRQ(ierr);
1360 
1361   /* general events */
1362   ierr = PetscLogEventRegister("PCGAMGGraph_AGG", PC_CLASSID, &PC_GAMGGraph_AGG);CHKERRQ(ierr);
1363   ierr = PetscLogEventRegister("PCGAMGGraph_GEO", PC_CLASSID, &PC_GAMGGraph_GEO);CHKERRQ(ierr);
1364   ierr = PetscLogEventRegister("PCGAMGCoarse_AGG", PC_CLASSID, &PC_GAMGCoarsen_AGG);CHKERRQ(ierr);
1365   ierr = PetscLogEventRegister("PCGAMGCoarse_GEO", PC_CLASSID, &PC_GAMGCoarsen_GEO);CHKERRQ(ierr);
1366   ierr = PetscLogEventRegister("PCGAMGProl_AGG", PC_CLASSID, &PC_GAMGProlongator_AGG);CHKERRQ(ierr);
1367   ierr = PetscLogEventRegister("PCGAMGProl_GEO", PC_CLASSID, &PC_GAMGProlongator_GEO);CHKERRQ(ierr);
1368   ierr = PetscLogEventRegister("PCGAMGPOpt_AGG", PC_CLASSID, &PC_GAMGOptProlongator_AGG);CHKERRQ(ierr);
1369 
1370 #if defined PETSC_GAMG_USE_LOG
1371   ierr = PetscLogEventRegister("GAMG: createProl", PC_CLASSID, &petsc_gamg_setup_events[SET1]);CHKERRQ(ierr);
1372   ierr = PetscLogEventRegister("  Graph", PC_CLASSID, &petsc_gamg_setup_events[GRAPH]);CHKERRQ(ierr);
1373   /* PetscLogEventRegister("    G.Mat", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_MAT]); */
1374   /* PetscLogEventRegister("    G.Filter", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_FILTER]); */
1375   /* PetscLogEventRegister("    G.Square", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_SQR]); */
1376   ierr = PetscLogEventRegister("  MIS/Agg", PC_CLASSID, &petsc_gamg_setup_events[SET4]);CHKERRQ(ierr);
1377   ierr = PetscLogEventRegister("  geo: growSupp", PC_CLASSID, &petsc_gamg_setup_events[SET5]);CHKERRQ(ierr);
1378   ierr = PetscLogEventRegister("  geo: triangle", PC_CLASSID, &petsc_gamg_setup_events[SET6]);CHKERRQ(ierr);
1379   ierr = PetscLogEventRegister("    search-set", PC_CLASSID, &petsc_gamg_setup_events[FIND_V]);CHKERRQ(ierr);
1380   ierr = PetscLogEventRegister("  SA: col data", PC_CLASSID, &petsc_gamg_setup_events[SET7]);CHKERRQ(ierr);
1381   ierr = PetscLogEventRegister("  SA: frmProl0", PC_CLASSID, &petsc_gamg_setup_events[SET8]);CHKERRQ(ierr);
1382   ierr = PetscLogEventRegister("  SA: smooth", PC_CLASSID, &petsc_gamg_setup_events[SET9]);CHKERRQ(ierr);
1383   ierr = PetscLogEventRegister("GAMG: partLevel", PC_CLASSID, &petsc_gamg_setup_events[SET2]);CHKERRQ(ierr);
1384   ierr = PetscLogEventRegister("  repartition", PC_CLASSID, &petsc_gamg_setup_events[SET12]);CHKERRQ(ierr);
1385   ierr = PetscLogEventRegister("  Invert-Sort", PC_CLASSID, &petsc_gamg_setup_events[SET13]);CHKERRQ(ierr);
1386   ierr = PetscLogEventRegister("  Move A", PC_CLASSID, &petsc_gamg_setup_events[SET14]);CHKERRQ(ierr);
1387   ierr = PetscLogEventRegister("  Move P", PC_CLASSID, &petsc_gamg_setup_events[SET15]);CHKERRQ(ierr);
1388 
1389   /* PetscLogEventRegister(" PL move data", PC_CLASSID, &petsc_gamg_setup_events[SET13]); */
1390   /* PetscLogEventRegister("GAMG: fix", PC_CLASSID, &petsc_gamg_setup_events[SET10]); */
1391   /* PetscLogEventRegister("GAMG: set levels", PC_CLASSID, &petsc_gamg_setup_events[SET11]); */
1392   /* create timer stages */
1393 #if defined GAMG_STAGES
1394   {
1395     char     str[32];
1396     PetscInt lidx;
1397     sprintf(str,"MG Level %d (finest)",0);
1398     ierr = PetscLogStageRegister(str, &gamg_stages[0]);CHKERRQ(ierr);
1399     for (lidx=1; lidx<GAMG_MAXLEVELS; lidx++) { /* register a stage for every level that might exist */
1400       sprintf(str,"MG Level %d",(int)lidx);
1401       ierr = PetscLogStageRegister(str, &gamg_stages[lidx]);CHKERRQ(ierr);
1402     }
1403   }
1404 #endif
1405 #endif
1406   PetscFunctionReturn(0);
1407 }
1408 
1409 #undef __FUNCT__
1410 #define __FUNCT__ "PCGAMGFinalizePackage"
1411 /*@C
1412  PCGAMGFinalizePackage - This function frees everything from the PCGAMG package. It is
1413     called from PetscFinalize() automatically.
1414 
1415  Level: developer
1416 
1417  .keywords: Petsc, destroy, package
1418  .seealso: PetscFinalize()
1419 @*/
1420 PetscErrorCode PCGAMGFinalizePackage(void)
1421 {
1422   PetscErrorCode ierr;
1423 
1424   PetscFunctionBegin;
1425   PCGAMGPackageInitialized = PETSC_FALSE;
1426   ierr = PetscFunctionListDestroy(&GAMGList);CHKERRQ(ierr);
1427   PetscFunctionReturn(0);
1428 }
1429 
1430 #undef __FUNCT__
1431 #define __FUNCT__ "PCGAMGRegister"
1432 /*@C
1433  PCGAMGRegister - Register a PCGAMG implementation.
1434 
1435  Input Parameters:
1436  + type - string that will be used as the name of the GAMG type.
1437  - create - function for creating the gamg context.
1438 
1439   Level: advanced
1440 
1441  .seealso: PCGAMGType, PCGAMG, PCGAMGSetType()
1442 @*/
1443 PetscErrorCode PCGAMGRegister(PCGAMGType type, PetscErrorCode (*create)(PC))
1444 {
1445   PetscErrorCode ierr;
1446 
1447   PetscFunctionBegin;
1448   ierr = PCGAMGInitializePackage();CHKERRQ(ierr);
1449   ierr = PetscFunctionListAdd(&GAMGList,type,create);CHKERRQ(ierr);
1450   PetscFunctionReturn(0);
1451 }
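/*
   A registration sketch (illustrative; 'PCCreateGAMG_MyType' is a hypothetical creation
   routine supplied by the caller, not part of this file):

       static PetscErrorCode PCCreateGAMG_MyType(PC pc)
       {
         PetscFunctionBegin;
         ... fill in pc_gamg->ops (graph, coarsen, prolongator, createdefaultdata, ...) ...
         PetscFunctionReturn(0);
       }

       ierr = PCGAMGRegister("mytype", PCCreateGAMG_MyType);CHKERRQ(ierr);

   after which -pc_gamg_type mytype selects it at runtime.
*/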
1452 
1453