1 /*
2  GAMG geometric-algebraic multigrid PC - Mark Adams 2011
3  */
4 #include <petsc/private/matimpl.h>
5 #include <../src/ksp/pc/impls/gamg/gamg.h>           /*I "petscpc.h" I*/
6 #include <petsc/private/kspimpl.h>
7 #include <../src/ksp/pc/impls/bjacobi/bjacobi.h> /* Hack to access same_local_solves */
8 
9 #if defined PETSC_GAMG_USE_LOG
10 PetscLogEvent petsc_gamg_setup_events[NUM_SET];
11 #endif
12 
13 #if defined PETSC_USE_LOG
14 PetscLogEvent PC_GAMGGraph_AGG;
15 PetscLogEvent PC_GAMGGraph_GEO;
16 PetscLogEvent PC_GAMGCoarsen_AGG;
17 PetscLogEvent PC_GAMGCoarsen_GEO;
18 PetscLogEvent PC_GAMGProlongator_AGG;
19 PetscLogEvent PC_GAMGProlongator_GEO;
20 PetscLogEvent PC_GAMGOptProlongator_AGG;
21 #endif
22 
23 #define GAMG_MAXLEVELS 30
24 
25 /* #define GAMG_STAGES */
26 #if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES)
27 static PetscLogStage gamg_stages[GAMG_MAXLEVELS];
28 #endif
29 
30 static PetscFunctionList GAMGList = 0;
31 static PetscBool PCGAMGPackageInitialized;
32 
33 /* ----------------------------------------------------------------------------- */
34 #undef __FUNCT__
35 #define __FUNCT__ "PCReset_GAMG"
36 PetscErrorCode PCReset_GAMG(PC pc)
37 {
38   PetscErrorCode ierr;
39   PC_MG          *mg      = (PC_MG*)pc->data;
40   PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
41 
42   PetscFunctionBegin;
43   if (pc_gamg->data) { /* this should not happen, cleaned up in SetUp */
44     ierr = PetscPrintf(PetscObjectComm((PetscObject)pc),"***[%d]%s this should not happen, cleaned up in SetUp\n",0,__FUNCT__);CHKERRQ(ierr);
45     ierr = PetscFree(pc_gamg->data);CHKERRQ(ierr);
46   }
47   pc_gamg->data_sz = 0;
48   ierr = PetscFree(pc_gamg->orig_data);CHKERRQ(ierr);
49   PetscFunctionReturn(0);
50 }
51 
52 /* -------------------------------------------------------------------------- */
53 /*
54    PCGAMGCreateLevel_GAMG: create the coarse operator with a Galerkin triple product (RAP) and, if
55      requested, repartition and/or reduce the number of active processes.
56 
57    Input Parameters:
58    . pc - the preconditioner context; side effect: coarse data in 'pc_gamg->data' and
59           'pc_gamg->data_sz' are changed via repartitioning/reduction.
60    . Amat_fine - matrix on this fine (k) level
61    . cr_bs - coarse block size
62    In/Output Parameters:
63    . a_P_inout - prolongation operator to the next level (k-->k-1)
64    . a_nactive_proc - number of active processes
65    Output Parameter:
66    . a_Amat_crs - coarse matrix that is created (k-1)
67 */
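/*
   Overview of the flow below: form Cmat = Pold^T * Amat_fine * Pold with MatPtAP(); pick the number of
   processes to keep active ('new_size') from the global coarse equation count and 'min_eq_proc'; if neither
   repartitioning nor a reduction is requested/needed, return Cmat unchanged.  Otherwise build an
   equation-to-process assignment 'is_eq_newproc' (from MatPartitioning when repartitioning, or by simple
   aggregation onto every rfactor-th process), scatter the auxiliary node data in 'pc_gamg->data' to the new
   owners, and extract the permuted coarse matrix and the matching prolongator columns with MatGetSubMatrix().
   If 'Pcolumnperm' is non-NULL it returns the column permutation applied to P (NULL when nothing moved).
*/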
68 
69 #undef __FUNCT__
70 #define __FUNCT__ "PCGAMGCreateLevel_GAMG"
71 static PetscErrorCode PCGAMGCreateLevel_GAMG(PC pc,Mat Amat_fine,PetscInt cr_bs,
72                                   Mat *a_P_inout,Mat *a_Amat_crs,PetscMPIInt *a_nactive_proc,
73                                   IS * Pcolumnperm)
74 {
75   PetscErrorCode  ierr;
76   PC_MG           *mg         = (PC_MG*)pc->data;
77   PC_GAMG         *pc_gamg    = (PC_GAMG*)mg->innerctx;
78   Mat             Cmat,Pold=*a_P_inout;
79   MPI_Comm        comm;
80   PetscMPIInt     rank,size,new_size,nactive=*a_nactive_proc;
81   PetscInt        ncrs_eq,ncrs,f_bs;
82 
83   PetscFunctionBegin;
84   ierr = PetscObjectGetComm((PetscObject)Amat_fine,&comm);CHKERRQ(ierr);
85   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
86   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
87   ierr = MatGetBlockSize(Amat_fine, &f_bs);CHKERRQ(ierr);
88   ierr = MatPtAP(Amat_fine, Pold, MAT_INITIAL_MATRIX, 2.0, &Cmat);CHKERRQ(ierr);
89 
90   /* set 'ncrs' (nodes), 'ncrs_eq' (equations)*/
91   ierr = MatGetLocalSize(Cmat, &ncrs_eq, NULL);CHKERRQ(ierr);
92   if (pc_gamg->data_cell_rows>0) {
93     ncrs = pc_gamg->data_sz/pc_gamg->data_cell_cols/pc_gamg->data_cell_rows;
94   } else {
95     PetscInt  bs;
96     ierr = MatGetBlockSize(Cmat, &bs);CHKERRQ(ierr);
97     ncrs = ncrs_eq/bs;
98   }
99 
100   /* get number of PEs to make active 'new_size', reduce, can be any integer 1-P */
101   {
102     PetscInt ncrs_eq_glob;
103     ierr     = MatGetSize(Cmat, &ncrs_eq_glob, NULL);CHKERRQ(ierr);
104     new_size = (PetscMPIInt)((float)ncrs_eq_glob/(float)pc_gamg->min_eq_proc + 0.5); /* hardwire min. number of eq/proc */
105     if (!new_size) new_size = 1; /* not likely, but possible? */
106     else if (new_size >= nactive) new_size = nactive; /* no change, rare */
107   }
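  /* For example, ncrs_eq_glob = 10000 and min_eq_proc = 50 give new_size = 200, which is then capped at the
     current number of active processes 'nactive'. */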
108 
109   if (Pcolumnperm) *Pcolumnperm = NULL;
110 
111   if (!pc_gamg->repart && new_size==nactive) *a_Amat_crs = Cmat; /* output - no repartitioning or reduction - could bail here */
112   else {
113     PetscInt       *counts,*newproc_idx,ii,jj,kk,strideNew,*tidx,ncrs_new,ncrs_eq_new,nloc_old;
114     IS             is_eq_newproc,is_eq_num,is_eq_num_prim,new_eq_indices;
115 
116     nloc_old = ncrs_eq/cr_bs;
117     if (ncrs_eq % cr_bs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"ncrs_eq %D not divisible by cr_bs %D",ncrs_eq,cr_bs);
118 #if defined PETSC_GAMG_USE_LOG
119     ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET12],0,0,0,0);CHKERRQ(ierr);
120 #endif
121     /* make 'is_eq_newproc' */
122     ierr = PetscMalloc1(size, &counts);CHKERRQ(ierr);
123     if (pc_gamg->repart) {
124       /* Repartition Cmat_{k} and move columns of P^{k}_{k-1} and coordinates of primal part accordingly */
125       Mat adj;
126 
127       ierr = PetscInfo3(pc,"Repartition: size (active): %D --> %D, neq = %D\n",*a_nactive_proc,new_size,ncrs_eq);CHKERRQ(ierr);
128 
129       /* get 'adj' */
130       if (cr_bs == 1) {
131         ierr = MatConvert(Cmat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj);CHKERRQ(ierr);
132       } else {
133         /* make a scalar matrix to partition (no Stokes here) */
134         Mat               tMat;
135         PetscInt          Istart_crs,Iend_crs,ncols,jj,Ii;
136         const PetscScalar *vals;
137         const PetscInt    *idx;
138         PetscInt          *d_nnz, *o_nnz, M, N;
139         static PetscInt   llev = 0;
140         MatType           mtype;
141 
142         ierr = PetscMalloc2(ncrs, &d_nnz,ncrs, &o_nnz);CHKERRQ(ierr);
143         ierr = MatGetOwnershipRange(Cmat, &Istart_crs, &Iend_crs);CHKERRQ(ierr);
144         ierr = MatGetSize(Cmat, &M, &N);CHKERRQ(ierr);
145         for (Ii = Istart_crs, jj = 0; Ii < Iend_crs; Ii += cr_bs, jj++) {
146           ierr      = MatGetRow(Cmat,Ii,&ncols,0,0);CHKERRQ(ierr);
147           d_nnz[jj] = ncols/cr_bs;
148           o_nnz[jj] = ncols/cr_bs;
149           ierr      = MatRestoreRow(Cmat,Ii,&ncols,0,0);CHKERRQ(ierr);
150           if (d_nnz[jj] > ncrs) d_nnz[jj] = ncrs;
151           if (o_nnz[jj] > (M/cr_bs-ncrs)) o_nnz[jj] = M/cr_bs-ncrs;
152         }
153 
154         ierr = MatGetType(Amat_fine,&mtype);CHKERRQ(ierr);
155         ierr = MatCreate(comm, &tMat);CHKERRQ(ierr);
156         ierr = MatSetSizes(tMat, ncrs, ncrs,PETSC_DETERMINE, PETSC_DETERMINE);CHKERRQ(ierr);
157         ierr = MatSetType(tMat,mtype);CHKERRQ(ierr);
158         ierr = MatSeqAIJSetPreallocation(tMat,0,d_nnz);CHKERRQ(ierr);
159         ierr = MatMPIAIJSetPreallocation(tMat,0,d_nnz,0,o_nnz);CHKERRQ(ierr);
160         ierr = PetscFree2(d_nnz,o_nnz);CHKERRQ(ierr);
161 
162         for (ii = Istart_crs; ii < Iend_crs; ii++) {
163           PetscInt dest_row = ii/cr_bs;
164           ierr = MatGetRow(Cmat,ii,&ncols,&idx,&vals);CHKERRQ(ierr);
165           for (jj = 0; jj < ncols; jj++) {
166             PetscInt    dest_col = idx[jj]/cr_bs;
167             PetscScalar v        = 1.0;
168             ierr = MatSetValues(tMat,1,&dest_row,1,&dest_col,&v,ADD_VALUES);CHKERRQ(ierr);
169           }
170           ierr = MatRestoreRow(Cmat,ii,&ncols,&idx,&vals);CHKERRQ(ierr);
171         }
172         ierr = MatAssemblyBegin(tMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
173         ierr = MatAssemblyEnd(tMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
174 
175         if (llev++ == -1) {
176           PetscViewer viewer; char fname[32];
177           ierr = PetscSNPrintf(fname,sizeof(fname),"part_mat_%D.mat",llev);CHKERRQ(ierr);
178           PetscViewerBinaryOpen(comm,fname,FILE_MODE_WRITE,&viewer);
179           ierr = MatView(tMat, viewer);CHKERRQ(ierr);
180           ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
181         }
182 
183         ierr = MatConvert(tMat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj);CHKERRQ(ierr);
184 
185         ierr = MatDestroy(&tMat);CHKERRQ(ierr);
186       } /* create 'adj' */
187 
188       { /* partition: get newproc_idx */
189         char            prefix[256];
190         const char      *pcpre;
191         const PetscInt  *is_idx;
192         MatPartitioning mpart;
193         IS              proc_is;
194         PetscInt        targetPE;
195 
196         ierr = MatPartitioningCreate(comm, &mpart);CHKERRQ(ierr);
197         ierr = MatPartitioningSetAdjacency(mpart, adj);CHKERRQ(ierr);
198         ierr = PCGetOptionsPrefix(pc, &pcpre);CHKERRQ(ierr);
199         ierr = PetscSNPrintf(prefix,sizeof(prefix),"%spc_gamg_",pcpre ? pcpre : "");CHKERRQ(ierr);
200         ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
201         ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr);
202         ierr = MatPartitioningSetNParts(mpart, new_size);CHKERRQ(ierr);
203         ierr = MatPartitioningApply(mpart, &proc_is);CHKERRQ(ierr);
204         ierr = MatPartitioningDestroy(&mpart);CHKERRQ(ierr);
205 
206         /* collect IS info */
207         ierr     = PetscMalloc1(ncrs_eq, &newproc_idx);CHKERRQ(ierr);
208         ierr     = ISGetIndices(proc_is, &is_idx);CHKERRQ(ierr);
209         targetPE = 1; /* bring to "front" of machine */
210         /*targetPE = size/new_size;*/ /* spread partitioning across machine */
211         for (kk = jj = 0 ; kk < nloc_old ; kk++) {
212           for (ii = 0 ; ii < cr_bs ; ii++, jj++) {
213             newproc_idx[jj] = is_idx[kk] * targetPE; /* distribution */
214           }
215         }
216         ierr = ISRestoreIndices(proc_is, &is_idx);CHKERRQ(ierr);
217         ierr = ISDestroy(&proc_is);CHKERRQ(ierr);
218       }
219       ierr = MatDestroy(&adj);CHKERRQ(ierr);
220 
221       ierr = ISCreateGeneral(comm, ncrs_eq, newproc_idx, PETSC_COPY_VALUES, &is_eq_newproc);CHKERRQ(ierr);
222       ierr = PetscFree(newproc_idx);CHKERRQ(ierr);
223     } else { /* simple aggregation of parts -- 'is_eq_newproc' */
224 
225       PetscInt rfactor,targetPE;
226       /* find factor */
227       if (new_size == 1) rfactor = size; /* easy */
228       else {
229         PetscReal best_fact = 0.;
230         jj = -1;
231         for (kk = 1 ; kk <= size ; kk++) {
232           if (!(size%kk)) { /* a candidate */
233             PetscReal nactpe = (PetscReal)size/(PetscReal)kk, fact = nactpe/(PetscReal)new_size;
234             if (fact > 1.0) fact = 1./fact; /* keep fact < 1 */
235             if (fact > best_fact) {
236               best_fact = fact; jj = kk;
237             }
238           }
239         }
240         if (jj != -1) rfactor = jj;
241         else rfactor = 1; /* should not happen since kk=1 always divides size; defensive default */
242       }
243       new_size = size/rfactor;
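      /* For example, size = 12 with a target new_size of 5: the divisors of 12 give candidate active counts
         12, 6, 4, 3, 2, 1; the count whose ratio to the target is closest to 1 is 6 (fact = 5/6), so
         rfactor = 2 and new_size becomes 6. */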
244 
245       if (new_size==nactive) {
246         *a_Amat_crs = Cmat; /* output - no repartitioning or reduction, bail out because nested here */
247         ierr        = PetscFree(counts);CHKERRQ(ierr);
248         ierr = PetscInfo2(pc,"Aggregate processors noop: new_size=%D, neq(loc)=%D\n",new_size,ncrs_eq);CHKERRQ(ierr);
249         PetscFunctionReturn(0);
250       }
251 
252       ierr = PetscInfo1(pc,"Number of equations (loc) %D with simple aggregation\n",ncrs_eq);CHKERRQ(ierr);
253       targetPE = rank/rfactor;
254       ierr     = ISCreateStride(comm, ncrs_eq, targetPE, 0, &is_eq_newproc);CHKERRQ(ierr);
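      /* a stride of 0 makes every index equal to targetPE, i.e. all of this process's equations are assigned
         to process rank/rfactor */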
255     } /* end simple 'is_eq_newproc' */
256 
257     /*
258      Create an index set from the is_eq_newproc index set to indicate the mapping TO
259      */
260     ierr = ISPartitioningToNumbering(is_eq_newproc, &is_eq_num);CHKERRQ(ierr);
261     is_eq_num_prim = is_eq_num;
262     /*
263       Determine how many equations/vertices are assigned to each processor
264      */
265     ierr        = ISPartitioningCount(is_eq_newproc, size, counts);CHKERRQ(ierr);
266     ncrs_eq_new = counts[rank];
267     ierr        = ISDestroy(&is_eq_newproc);CHKERRQ(ierr);
268     ncrs_new = ncrs_eq_new/cr_bs; /* nodes */
269 
270     ierr = PetscFree(counts);CHKERRQ(ierr);
271 #if defined PETSC_GAMG_USE_LOG
272     ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET12],0,0,0,0);CHKERRQ(ierr);
273 #endif
274     /* data movement scope -- this could be moved to subclasses so that we don't try to cram all auxiliary data into some complex abstracted thing */
275     {
276     Vec            src_crd, dest_crd;
277     const PetscInt *idx,ndata_rows=pc_gamg->data_cell_rows,ndata_cols=pc_gamg->data_cell_cols,node_data_sz=ndata_rows*ndata_cols;
278     VecScatter     vecscat;
279     PetscScalar    *array;
280     IS isscat;
281 
282     /* move data (for primal equations only) */
283     /* Create a vector to contain the newly ordered element information */
284     ierr = VecCreate(comm, &dest_crd);CHKERRQ(ierr);
285     ierr = VecSetSizes(dest_crd, node_data_sz*ncrs_new, PETSC_DECIDE);CHKERRQ(ierr);
286     ierr = VecSetType(dest_crd,VECSTANDARD);CHKERRQ(ierr); /* this is needed! */
287     /*
288      There are 'ndata_rows*ndata_cols' data items per node (one can think of the vectors as having
289      a block size of 'node_data_sz').  Note, ISs are expanded into equation space by 'cr_bs'.
290      */
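    /* tidx[] holds the destination slots of each local node's data: node ii's new (global) node number is
       id = idx[ii*cr_bs]/cr_bs, and its node_data_sz values go to positions id*node_data_sz ...
       id*node_data_sz + node_data_sz - 1 in the new ordering */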
291     ierr = PetscMalloc1(ncrs*node_data_sz, &tidx);CHKERRQ(ierr);
292     ierr = ISGetIndices(is_eq_num_prim, &idx);CHKERRQ(ierr);
293     for (ii=0,jj=0; ii<ncrs; ii++) {
294       PetscInt id = idx[ii*cr_bs]/cr_bs; /* get node back */
295       for (kk=0; kk<node_data_sz; kk++, jj++) tidx[jj] = id*node_data_sz + kk;
296     }
297     ierr = ISRestoreIndices(is_eq_num_prim, &idx);CHKERRQ(ierr);
298     ierr = ISCreateGeneral(comm, node_data_sz*ncrs, tidx, PETSC_COPY_VALUES, &isscat);CHKERRQ(ierr);
299     ierr = PetscFree(tidx);CHKERRQ(ierr);
300     /*
301      Create a vector to contain the original vertex information for each element
302      */
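    /* pc_gamg->data[] stores the data one column at a time across all local nodes (stride0 = ncrs*ndata_rows),
       while src_crd packs each node's ndata_rows*ndata_cols values contiguously; the ix/jx index pair below
       performs that layout conversion */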
303     ierr = VecCreateSeq(PETSC_COMM_SELF, node_data_sz*ncrs, &src_crd);CHKERRQ(ierr);
304     for (jj=0; jj<ndata_cols; jj++) {
305       const PetscInt stride0=ncrs*pc_gamg->data_cell_rows;
306       for (ii=0; ii<ncrs; ii++) {
307         for (kk=0; kk<ndata_rows; kk++) {
308           PetscInt    ix = ii*ndata_rows + kk + jj*stride0, jx = ii*node_data_sz + kk*ndata_cols + jj;
309           PetscScalar tt = (PetscScalar)pc_gamg->data[ix];
310           ierr = VecSetValues(src_crd, 1, &jx, &tt, INSERT_VALUES);CHKERRQ(ierr);
311         }
312       }
313     }
314     ierr = VecAssemblyBegin(src_crd);CHKERRQ(ierr);
315     ierr = VecAssemblyEnd(src_crd);CHKERRQ(ierr);
316     /*
317       Scatter the element vertex information (still in the original vertex ordering)
318       to the correct processor
319     */
320     ierr = VecScatterCreate(src_crd, NULL, dest_crd, isscat, &vecscat);CHKERRQ(ierr);
321     ierr = ISDestroy(&isscat);CHKERRQ(ierr);
322     ierr = VecScatterBegin(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
323     ierr = VecScatterEnd(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
324     ierr = VecScatterDestroy(&vecscat);CHKERRQ(ierr);
325     ierr = VecDestroy(&src_crd);CHKERRQ(ierr);
326     /*
327       Put the element vertex data into a new allocation of the gdata->ele
328     */
329     ierr = PetscFree(pc_gamg->data);CHKERRQ(ierr);
330     ierr = PetscMalloc1(node_data_sz*ncrs_new, &pc_gamg->data);CHKERRQ(ierr);
331 
332     pc_gamg->data_sz = node_data_sz*ncrs_new;
333     strideNew        = ncrs_new*ndata_rows;
334 
335     ierr = VecGetArray(dest_crd, &array);CHKERRQ(ierr);
336     for (jj=0; jj<ndata_cols; jj++) {
337       for (ii=0; ii<ncrs_new; ii++) {
338         for (kk=0; kk<ndata_rows; kk++) {
339           PetscInt ix = ii*ndata_rows + kk + jj*strideNew, jx = ii*node_data_sz + kk*ndata_cols + jj;
340           pc_gamg->data[ix] = PetscRealPart(array[jx]);
341         }
342       }
343     }
344     ierr = VecRestoreArray(dest_crd, &array);CHKERRQ(ierr);
345     ierr = VecDestroy(&dest_crd);CHKERRQ(ierr);
346     }
347     /* move A and P (columns) with new layout */
348 #if defined PETSC_GAMG_USE_LOG
349     ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET13],0,0,0,0);CHKERRQ(ierr);
350 #endif
351 
352     /*
353       Invert for MatGetSubMatrix
354     */
355     ierr = ISInvertPermutation(is_eq_num, ncrs_eq_new, &new_eq_indices);CHKERRQ(ierr);
356     ierr = ISSort(new_eq_indices);CHKERRQ(ierr); /* is this needed? */
357     ierr = ISSetBlockSize(new_eq_indices, cr_bs);CHKERRQ(ierr);
358     if (is_eq_num != is_eq_num_prim) {
359       ierr = ISDestroy(&is_eq_num_prim);CHKERRQ(ierr); /* could be same as 'is_eq_num' */
360     }
361     if (Pcolumnperm) {
362       ierr = PetscObjectReference((PetscObject)new_eq_indices);CHKERRQ(ierr);
363       *Pcolumnperm = new_eq_indices;
364     }
365     ierr = ISDestroy(&is_eq_num);CHKERRQ(ierr);
366 #if defined PETSC_GAMG_USE_LOG
367     ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET13],0,0,0,0);CHKERRQ(ierr);
368     ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET14],0,0,0,0);CHKERRQ(ierr);
369 #endif
370     /* 'a_Amat_crs' output */
371     {
372       Mat mat;
373       ierr        = MatGetSubMatrix(Cmat, new_eq_indices, new_eq_indices, MAT_INITIAL_MATRIX, &mat);CHKERRQ(ierr);
374       *a_Amat_crs = mat;
375 
376       if (!PETSC_TRUE) {
377         PetscInt cbs, rbs;
378         ierr = MatGetBlockSizes(Cmat, &rbs, &cbs);CHKERRQ(ierr);
379         ierr = PetscPrintf(MPI_COMM_SELF,"[%d]%s Old Mat rbs=%d cbs=%d\n",rank,__FUNCT__,rbs,cbs);CHKERRQ(ierr);
380         ierr = MatGetBlockSizes(mat, &rbs, &cbs);CHKERRQ(ierr);
381         ierr = PetscPrintf(MPI_COMM_SELF,"[%d]%s New Mat rbs=%d cbs=%d cr_bs=%d\n",rank,__FUNCT__,rbs,cbs,cr_bs);CHKERRQ(ierr);
382       }
383     }
384     ierr = MatDestroy(&Cmat);CHKERRQ(ierr);
385 
386 #if defined PETSC_GAMG_USE_LOG
387     ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET14],0,0,0,0);CHKERRQ(ierr);
388 #endif
389     /* prolongator */
390     {
391       IS       findices;
392       PetscInt Istart,Iend;
393       Mat      Pnew;
394       ierr = MatGetOwnershipRange(Pold, &Istart, &Iend);CHKERRQ(ierr);
395 #if defined PETSC_GAMG_USE_LOG
396       ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET15],0,0,0,0);CHKERRQ(ierr);
397 #endif
398       ierr = ISCreateStride(comm,Iend-Istart,Istart,1,&findices);CHKERRQ(ierr);
399       ierr = ISSetBlockSize(findices,f_bs);CHKERRQ(ierr);
400       ierr = MatGetSubMatrix(Pold, findices, new_eq_indices, MAT_INITIAL_MATRIX, &Pnew);CHKERRQ(ierr);
401       ierr = ISDestroy(&findices);CHKERRQ(ierr);
402 
403       if (!PETSC_TRUE) {
404         PetscInt cbs, rbs;
405         ierr = MatGetBlockSizes(Pold, &rbs, &cbs);CHKERRQ(ierr);
406         ierr = PetscPrintf(MPI_COMM_SELF,"[%d]%s Pold rbs=%d cbs=%d\n",rank,__FUNCT__,rbs,cbs);CHKERRQ(ierr);
407         ierr = MatGetBlockSizes(Pnew, &rbs, &cbs);CHKERRQ(ierr);
408         ierr = PetscPrintf(MPI_COMM_SELF,"[%d]%s Pnew rbs=%d cbs=%d\n",rank,__FUNCT__,rbs,cbs);CHKERRQ(ierr);
409       }
410 #if defined PETSC_GAMG_USE_LOG
411       ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET15],0,0,0,0);CHKERRQ(ierr);
412 #endif
413       ierr = MatDestroy(a_P_inout);CHKERRQ(ierr);
414 
415       /* output - repartitioned */
416       *a_P_inout = Pnew;
417     }
418     ierr = ISDestroy(&new_eq_indices);CHKERRQ(ierr);
419 
420     *a_nactive_proc = new_size; /* output */
421   }
422 
423   /* output matrix data */
424   if (!PETSC_TRUE) {
425     PetscViewer viewer; char fname[32]; static int llev=0; Cmat = *a_Amat_crs;
426     if (!llev) {
427       sprintf(fname,"Cmat_%d.m",llev++);
428       PetscViewerASCIIOpen(comm,fname,&viewer);
429       ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_MATLAB);CHKERRQ(ierr);
430       ierr = MatView(Amat_fine, viewer);CHKERRQ(ierr);
431       ierr = PetscViewerDestroy(&viewer);
432     }
433     sprintf(fname,"Cmat_%d.m",llev++);
434     PetscViewerASCIIOpen(comm,fname,&viewer);
435     ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_MATLAB);CHKERRQ(ierr);
436     ierr = MatView(Cmat, viewer);CHKERRQ(ierr);
437     ierr = PetscViewerDestroy(&viewer);
438   }
439   PetscFunctionReturn(0);
440 }
441 
442 /* -------------------------------------------------------------------------- */
443 /*
444    PCSetUp_GAMG - Prepares for the use of the GAMG preconditioner
445                     by setting data structures and options.
446 
447    Input Parameter:
448 .  pc - the preconditioner context
449 
450 */
451 #undef __FUNCT__
452 #define __FUNCT__ "PCSetUp_GAMG"
453 PetscErrorCode PCSetUp_GAMG(PC pc)
454 {
455   PetscErrorCode ierr;
456   PC_MG          *mg      = (PC_MG*)pc->data;
457   PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
458   Mat            Pmat     = pc->pmat;
459   PetscInt       fine_level,level,level1,bs,M,qq,lidx,nASMBlocksArr[GAMG_MAXLEVELS];
460   MPI_Comm       comm;
461   PetscMPIInt    rank,size,nactivepe;
462   Mat            Aarr[GAMG_MAXLEVELS],Parr[GAMG_MAXLEVELS];
463   IS             *ASMLocalIDsArr[GAMG_MAXLEVELS];
464   PetscLogDouble nnz0=0.,nnztot=0.;
465   MatInfo        info;
466 
467   PetscFunctionBegin;
468   ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
469   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
470   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
471 
472   if (pc_gamg->setup_count++ > 0) {
473     if ((PetscBool)(!pc_gamg->reuse_prol)) {
474       /* reset everything */
475       ierr = PCReset_MG(pc);CHKERRQ(ierr);
476       pc->setupcalled = 0;
477     } else {
478       PC_MG_Levels **mglevels = mg->levels;
479       /* just do Galerkin grids */
480       Mat          B,dA,dB;
481 
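      /* Reuse path: the prolongators from the first setup are kept and only the coarse operators are
         recomputed by sweeping down the hierarchy with MatPtAP().  On the second setup (setup_count == 2) the
         coarse matrices are created anew because repartitioning changed their structure; on later setups the
         existing coarse matrices are reused in place (MAT_REUSE_MATRIX). */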
482       if (!pc->setupcalled) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"PCSetUp() has not been called yet");
483       if (pc_gamg->Nlevels > 1) {
484         /* currently only handle case where mat and pmat are the same on coarser levels */
485         ierr = KSPGetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,&dA,&dB);CHKERRQ(ierr);
486         /* (re)set to get dirty flag */
487         ierr = KSPSetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,dA,dB);CHKERRQ(ierr);
488 
489         for (level=pc_gamg->Nlevels-2; level>=0; level--) {
490           /* the first time through, the matrix structure has changed due to repartitioning */
491           if (pc_gamg->setup_count==2) {
492             ierr = MatPtAP(dB,mglevels[level+1]->interpolate,MAT_INITIAL_MATRIX,1.0,&B);CHKERRQ(ierr);
493             ierr = MatDestroy(&mglevels[level]->A);CHKERRQ(ierr);
494 
495             mglevels[level]->A = B;
496           } else {
497             ierr = KSPGetOperators(mglevels[level]->smoothd,NULL,&B);CHKERRQ(ierr);
498             ierr = MatPtAP(dB,mglevels[level+1]->interpolate,MAT_REUSE_MATRIX,1.0,&B);CHKERRQ(ierr);
499           }
500           ierr = KSPSetOperators(mglevels[level]->smoothd,B,B);CHKERRQ(ierr);
501           dB   = B;
502         }
503       }
504 
505       ierr = PCSetUp_MG(pc);CHKERRQ(ierr);
506 
507       PetscFunctionReturn(0);
508     }
509   }
510 
511   if (!pc_gamg->data) {
512     if (pc_gamg->orig_data) {
513       ierr = MatGetBlockSize(Pmat, &bs);CHKERRQ(ierr);
514       ierr = MatGetLocalSize(Pmat, &qq, NULL);CHKERRQ(ierr);
515 
516       pc_gamg->data_sz        = (qq/bs)*pc_gamg->orig_data_cell_rows*pc_gamg->orig_data_cell_cols;
517       pc_gamg->data_cell_rows = pc_gamg->orig_data_cell_rows;
518       pc_gamg->data_cell_cols = pc_gamg->orig_data_cell_cols;
519 
520       ierr = PetscMalloc1(pc_gamg->data_sz, &pc_gamg->data);CHKERRQ(ierr);
521       for (qq=0; qq<pc_gamg->data_sz; qq++) pc_gamg->data[qq] = pc_gamg->orig_data[qq];
522     } else {
523       if (!pc_gamg->ops->createdefaultdata) SETERRQ(comm,PETSC_ERR_PLIB,"'createdefaultdata' not set(?) need to support NULL data");
524       ierr = pc_gamg->ops->createdefaultdata(pc,Pmat);CHKERRQ(ierr);
525     }
526   }
527 
528   /* cache original data for reuse */
529   if (!pc_gamg->orig_data && (PetscBool)(!pc_gamg->reuse_prol)) {
530     ierr = PetscMalloc1(pc_gamg->data_sz, &pc_gamg->orig_data);CHKERRQ(ierr);
531     for (qq=0; qq<pc_gamg->data_sz; qq++) pc_gamg->orig_data[qq] = pc_gamg->data[qq];
532     pc_gamg->orig_data_cell_rows = pc_gamg->data_cell_rows;
533     pc_gamg->orig_data_cell_cols = pc_gamg->data_cell_cols;
534   }
535 
536   /* get basic dims */
537   ierr = MatGetBlockSize(Pmat, &bs);CHKERRQ(ierr);
538   ierr = MatGetSize(Pmat, &M, &qq);CHKERRQ(ierr);
539 
540   ierr = MatGetInfo(Pmat,MAT_GLOBAL_SUM,&info);CHKERRQ(ierr); /* global reduction */
541   nnz0   = info.nz_used;
542   nnztot = info.nz_used;
543   ierr = PetscInfo6(pc,"level %d) N=%D, n data rows=%d, n data cols=%d, nnz/row (ave)=%d, np=%d\n",
544                     0,M,pc_gamg->data_cell_rows,pc_gamg->data_cell_cols,
545                     (int)(nnz0/(PetscReal)M+0.5),size);
546   CHKERRQ(ierr);
547 
548   /* Get A_i and R_i */
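  /* Each pass of this loop builds one coarser level: the subtype's graph() and coarsen() routines produce the
     aggregates, prolongator() builds P, optprolongator() (if provided) smooths it, and createlevel() forms the
     Galerkin coarse operator and possibly repartitions or reduces the number of active processes. */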
549   for (level=0, Aarr[0]=Pmat, nactivepe = size; /* hard wired stopping logic */
550        level < (pc_gamg->Nlevels-1) && (!level || M>pc_gamg->coarse_eq_limit);
551        level++) {
552     pc_gamg->current_level = level;
553     level1 = level + 1;
554 #if defined PETSC_GAMG_USE_LOG
555     ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET1],0,0,0,0);CHKERRQ(ierr);
556 #if (defined GAMG_STAGES)
557     ierr = PetscLogStagePush(gamg_stages[level]);CHKERRQ(ierr);
558 #endif
559 #endif
560     { /* construct prolongator */
561       Mat              Gmat;
562       PetscCoarsenData *agg_lists;
563       Mat              Prol11;
564 
565       ierr = pc_gamg->ops->graph(pc,Aarr[level], &Gmat);CHKERRQ(ierr);
566       ierr = pc_gamg->ops->coarsen(pc, &Gmat, &agg_lists);CHKERRQ(ierr);
567       ierr = pc_gamg->ops->prolongator(pc,Aarr[level],Gmat,agg_lists,&Prol11);CHKERRQ(ierr);
568 
569       /* could have failed to create new level */
570       if (Prol11) {
571         /* get new block size of coarse matrices */
572         ierr = MatGetBlockSizes(Prol11, NULL, &bs);CHKERRQ(ierr);
573 
574         if (pc_gamg->ops->optprolongator) {
575           /* smooth */
576           ierr = pc_gamg->ops->optprolongator(pc, Aarr[level], &Prol11);CHKERRQ(ierr);
577         }
578 
579         Parr[level1] = Prol11;
580       } else Parr[level1] = NULL;
581 
582       if (pc_gamg->use_aggs_in_gasm) {
583         PetscInt bs;
584         ierr = MatGetBlockSizes(Prol11, &bs, NULL);CHKERRQ(ierr);
585         ierr = PetscCDGetASMBlocks(agg_lists, bs, &nASMBlocksArr[level], &ASMLocalIDsArr[level]);CHKERRQ(ierr);
586       }
587 
588       ierr = MatDestroy(&Gmat);CHKERRQ(ierr);
589       ierr = PetscCDDestroy(agg_lists);CHKERRQ(ierr);
590     } /* construct prolongator scope */
591 #if defined PETSC_GAMG_USE_LOG
592     ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET1],0,0,0,0);CHKERRQ(ierr);
593 #endif
594     if (!level) Aarr[0] = Pmat; /* use Pmat for finest level setup */
595     if (!Parr[level1]) {
596       ierr =  PetscInfo1(pc,"Stop gridding, level %D\n",level);CHKERRQ(ierr);
597 #if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES)
598       ierr = PetscLogStagePop();CHKERRQ(ierr);
599 #endif
600       break;
601     }
602 #if defined PETSC_GAMG_USE_LOG
603     ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET2],0,0,0,0);CHKERRQ(ierr);
604 #endif
605 
606     ierr = pc_gamg->ops->createlevel(pc, Aarr[level], bs,&Parr[level1], &Aarr[level1], &nactivepe, NULL);CHKERRQ(ierr);
607 
608 #if defined PETSC_GAMG_USE_LOG
609     ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET2],0,0,0,0);CHKERRQ(ierr);
610 #endif
611     ierr = MatGetSize(Aarr[level1], &M, &qq);CHKERRQ(ierr);
612 
613     ierr = MatGetInfo(Aarr[level1], MAT_GLOBAL_SUM, &info);CHKERRQ(ierr);
614     nnztot += info.nz_used;
615     ierr = PetscInfo5(pc,"%d) N=%D, n data cols=%d, nnz/row (ave)=%d, %d active pes\n",level1,M,pc_gamg->data_cell_cols,(int)(info.nz_used/(PetscReal)M),nactivepe);CHKERRQ(ierr);
616 
617 #if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES)
618     ierr = PetscLogStagePop();CHKERRQ(ierr);
619 #endif
620     /* stop if one node or one proc -- could pull back for singular problems */
621     if ( (pc_gamg->data_cell_cols && M/pc_gamg->data_cell_cols < 2) || (!pc_gamg->data_cell_cols && M/bs < 2) ) {
622       ierr =  PetscInfo2(pc,"HARD stop of coarsening on level %D.  Grid too small: %D block nodes\n",level,M/bs);CHKERRQ(ierr);
623       level++;
624       break;
625     }
626   } /* levels */
627   ierr                  = PetscFree(pc_gamg->data);CHKERRQ(ierr);
628 
629   ierr = PetscInfo2(pc,"%D levels, grid complexity = %g\n",level+1,nnztot/nnz0);CHKERRQ(ierr);
630   pc_gamg->Nlevels = level + 1;
631   fine_level       = level;
632   ierr             = PCMGSetLevels(pc,pc_gamg->Nlevels,NULL);CHKERRQ(ierr);
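  /* Note the two numberings used below: Aarr[]/Parr[] index levels with 0 = finest, while PCMG's 'lidx' counts
     0 = coarsest, so 'level' and 'lidx' run in opposite directions (lidx = Nlevels-1-level). */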
633 
634   /* simple setup */
635   if (!PETSC_TRUE) {
636     PC_MG_Levels **mglevels = mg->levels;
637     for (lidx=0,level=pc_gamg->Nlevels-1;
638          lidx<fine_level;
639          lidx++, level--) {
640       ierr = PCMGSetInterpolation(pc, lidx+1, Parr[level]);CHKERRQ(ierr);
641       ierr = KSPSetOperators(mglevels[lidx]->smoothd, Aarr[level], Aarr[level]);CHKERRQ(ierr);
642       ierr = MatDestroy(&Parr[level]);CHKERRQ(ierr);
643       ierr = MatDestroy(&Aarr[level]);CHKERRQ(ierr);
644     }
645     ierr = KSPSetOperators(mglevels[fine_level]->smoothd, Aarr[0], Aarr[0]);CHKERRQ(ierr);
646 
647     ierr = PCSetUp_MG(pc);CHKERRQ(ierr);
648   } else if (pc_gamg->Nlevels > 1) { /* don't setup MG if one level */
649     /* set default smoothers & set operators */
650     for (lidx = 1, level = pc_gamg->Nlevels-2;
651          lidx <= fine_level;
652          lidx++, level--) {
653       KSP smoother;
654       PC  subpc;
655 
656       ierr = PCMGGetSmoother(pc, lidx, &smoother);CHKERRQ(ierr);
657       ierr = KSPGetPC(smoother, &subpc);CHKERRQ(ierr);
658 
659       ierr = KSPSetNormType(smoother, KSP_NORM_NONE);CHKERRQ(ierr);
660       /* set ops */
661       ierr = KSPSetOperators(smoother, Aarr[level], Aarr[level]);CHKERRQ(ierr);
662       ierr = PCMGSetInterpolation(pc, lidx, Parr[level+1]);CHKERRQ(ierr);
663 
664       /* set defaults */
665       ierr = KSPSetType(smoother, KSPCHEBYSHEV);CHKERRQ(ierr);
666 
667       /* set blocks for GASM smoother that uses the 'aggregates' */
668       if (pc_gamg->use_aggs_in_gasm) {
669         PetscInt sz;
670         IS       *is;
671 
672         sz   = nASMBlocksArr[level];
673         is   = ASMLocalIDsArr[level];
674         ierr = PCSetType(subpc, PCGASM);CHKERRQ(ierr);
675         ierr = PCGASMSetOverlap(subpc, 0);CHKERRQ(ierr);
676         if (!sz) {
677           IS       is;
678           PetscInt my0,kk;
679           ierr = MatGetOwnershipRange(Aarr[level], &my0, &kk);CHKERRQ(ierr);
680           ierr = ISCreateGeneral(PETSC_COMM_SELF, 1, &my0, PETSC_COPY_VALUES, &is);CHKERRQ(ierr);
681           ierr = PCGASMSetSubdomains(subpc, 1, &is, NULL);CHKERRQ(ierr);
682           ierr = ISDestroy(&is);CHKERRQ(ierr);
683         } else {
684           PetscInt kk;
685           ierr = PCGASMSetSubdomains(subpc, sz, is, NULL);CHKERRQ(ierr);
686           for (kk=0; kk<sz; kk++) {
687             ierr = ISDestroy(&is[kk]);CHKERRQ(ierr);
688           }
689           ierr = PetscFree(is);CHKERRQ(ierr);
690         }
691         ASMLocalIDsArr[level] = NULL;
692         nASMBlocksArr[level]  = 0;
693         ierr                  = PCGASMSetType(subpc, PC_GASM_BASIC);CHKERRQ(ierr);
694       } else {
695         ierr = PCSetType(subpc, PCSOR);CHKERRQ(ierr);
696       }
697     }
698     {
699       /* coarse grid */
700       KSP smoother,*k2; PC subpc,pc2; PetscInt ii,first;
701       Mat Lmat = Aarr[(level=pc_gamg->Nlevels-1)]; lidx = 0;
702       ierr = PCMGGetSmoother(pc, lidx, &smoother);CHKERRQ(ierr);
703       ierr = KSPSetOperators(smoother, Lmat, Lmat);CHKERRQ(ierr);
704       ierr = KSPSetNormType(smoother, KSP_NORM_NONE);CHKERRQ(ierr);
705       ierr = KSPGetPC(smoother, &subpc);CHKERRQ(ierr);
706       ierr = PCSetType(subpc, PCBJACOBI);CHKERRQ(ierr);
707       ierr = PCSetUp(subpc);CHKERRQ(ierr);
708       ierr = PCBJacobiGetSubKSP(subpc,&ii,&first,&k2);CHKERRQ(ierr);
709       if (ii != 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"ii %D is not one",ii);
710       ierr = KSPGetPC(k2[0],&pc2);CHKERRQ(ierr);
711       ierr = PCSetType(pc2, PCLU);CHKERRQ(ierr);
712       ierr = PCFactorSetShiftType(pc2,MAT_SHIFT_INBLOCKS);CHKERRQ(ierr);
713       ierr = KSPSetTolerances(k2[0],PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,1);CHKERRQ(ierr);
714       ierr = KSPSetType(k2[0], KSPPREONLY);CHKERRQ(ierr);
715       /* This flag gets reset by PCBJacobiGetSubKSP(), but our BJacobi really does the same algorithm everywhere (and in
716        * fact, all but one process will have zero dofs), so we reset the flag to avoid having PCView_BJacobi attempt to
717        * view every subdomain as though they were different. */
718       ((PC_BJacobi*)subpc->data)->same_local_solves = PETSC_TRUE;
719     }
720 
721     /* should be called in PCSetFromOptions_GAMG(), but cannot be called prior to PCMGSetLevels() */
722     ierr = PetscObjectOptionsBegin((PetscObject)pc);CHKERRQ(ierr);
723     ierr = PCSetFromOptions_MG(PetscOptionsObject,pc);CHKERRQ(ierr);
724     ierr = PetscOptionsEnd();CHKERRQ(ierr);
725     if (!mg->galerkin) SETERRQ(comm,PETSC_ERR_USER,"PCGAMG must use Galerkin for coarse operators.");
726     if (mg->galerkin == 1) mg->galerkin = 2;
727 
728     /* clean up */
729     for (level=1; level<pc_gamg->Nlevels; level++) {
730       ierr = MatDestroy(&Parr[level]);CHKERRQ(ierr);
731       ierr = MatDestroy(&Aarr[level]);CHKERRQ(ierr);
732     }
733 
734     ierr = PCSetUp_MG(pc);CHKERRQ(ierr);
735   } else {
736     KSP smoother;
737     ierr = PetscInfo(pc,"One level solver used (system is seen as DD). Using default solver.\n");CHKERRQ(ierr);
738     ierr = PCMGGetSmoother(pc, 0, &smoother);CHKERRQ(ierr);
739     ierr = KSPSetOperators(smoother, Aarr[0], Aarr[0]);CHKERRQ(ierr);
740     ierr = KSPSetType(smoother, KSPPREONLY);CHKERRQ(ierr);
741     ierr = PCSetUp_MG(pc);CHKERRQ(ierr);
742   }
743   PetscFunctionReturn(0);
744 }
745 
746 /* ------------------------------------------------------------------------- */
747 /*
748  PCDestroy_GAMG - Destroys the private context for the GAMG preconditioner
749    that was created with PCCreate_GAMG().
750 
751    Input Parameter:
752 .  pc - the preconditioner context
753 
754    Application Interface Routine: PCDestroy()
755 */
756 #undef __FUNCT__
757 #define __FUNCT__ "PCDestroy_GAMG"
758 PetscErrorCode PCDestroy_GAMG(PC pc)
759 {
760   PetscErrorCode ierr;
761   PC_MG          *mg     = (PC_MG*)pc->data;
762   PC_GAMG        *pc_gamg= (PC_GAMG*)mg->innerctx;
763 
764   PetscFunctionBegin;
765   ierr = PCReset_GAMG(pc);CHKERRQ(ierr);
766   if (pc_gamg->ops->destroy) {
767     ierr = (*pc_gamg->ops->destroy)(pc);CHKERRQ(ierr);
768   }
769   ierr = PetscRandomDestroy(&pc_gamg->random);CHKERRQ(ierr);
770   ierr = PetscFree(pc_gamg->ops);CHKERRQ(ierr);
771   ierr = PetscFree(pc_gamg->gamg_type_name);CHKERRQ(ierr);
772   ierr = PetscFree(pc_gamg);CHKERRQ(ierr);
773   ierr = PCDestroy_MG(pc);CHKERRQ(ierr);
774   PetscFunctionReturn(0);
775 }
776 
777 
778 #undef __FUNCT__
779 #define __FUNCT__ "PCGAMGSetProcEqLim"
780 /*@
781    PCGAMGSetProcEqLim - Set the goal (limit) on the number of equations per process on coarse grids, used when reducing the number of active processes.
782 
783    Logically Collective on PC
784 
785    Input Parameters:
786 +  pc - the preconditioner context
787 -  n - the number of equations per process to aim for
788 
789 
790    Options Database Key:
791 .  -pc_gamg_process_eq_limit <limit>
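
   Example (a minimal sketch, equivalent to the option above):
$     ierr = PCGAMGSetProcEqLim(pc,200);CHKERRQ(ierr);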
792 
793    Level: intermediate
794 
795    Concepts: Unstructured multigrid preconditioner
796 
797 .seealso: PCGAMGSetCoarseEqLim()
798 @*/
799 PetscErrorCode  PCGAMGSetProcEqLim(PC pc, PetscInt n)
800 {
801   PetscErrorCode ierr;
802 
803   PetscFunctionBegin;
804   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
805   ierr = PetscTryMethod(pc,"PCGAMGSetProcEqLim_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr);
806   PetscFunctionReturn(0);
807 }
808 
809 #undef __FUNCT__
810 #define __FUNCT__ "PCGAMGSetProcEqLim_GAMG"
811 static PetscErrorCode PCGAMGSetProcEqLim_GAMG(PC pc, PetscInt n)
812 {
813   PC_MG   *mg      = (PC_MG*)pc->data;
814   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
815 
816   PetscFunctionBegin;
817   if (n>0) pc_gamg->min_eq_proc = n;
818   PetscFunctionReturn(0);
819 }
820 
821 #undef __FUNCT__
822 #define __FUNCT__ "PCGAMGSetCoarseEqLim"
823 /*@
824    PCGAMGSetCoarseEqLim - Set max number of equations on coarse grids.
825 
826  Collective on PC
827 
828    Input Parameters:
829 +  pc - the preconditioner context
830 -  n - maximum number of equations to aim for
831 
832    Options Database Key:
833 .  -pc_gamg_coarse_eq_limit <limit>
834 
835    Level: intermediate
836 
837    Concepts: Unstructured multigrid preconditioner
838 
839 .seealso: PCGAMGSetProcEqLim()
840 @*/
841 PetscErrorCode PCGAMGSetCoarseEqLim(PC pc, PetscInt n)
842 {
843   PetscErrorCode ierr;
844 
845   PetscFunctionBegin;
846   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
847   ierr = PetscTryMethod(pc,"PCGAMGSetCoarseEqLim_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr);
848   PetscFunctionReturn(0);
849 }
850 
851 #undef __FUNCT__
852 #define __FUNCT__ "PCGAMGSetCoarseEqLim_GAMG"
853 static PetscErrorCode PCGAMGSetCoarseEqLim_GAMG(PC pc, PetscInt n)
854 {
855   PC_MG   *mg      = (PC_MG*)pc->data;
856   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
857 
858   PetscFunctionBegin;
859   if (n>0) pc_gamg->coarse_eq_limit = n;
860   PetscFunctionReturn(0);
861 }
862 
863 #undef __FUNCT__
864 #define __FUNCT__ "PCGAMGSetRepartitioning"
865 /*@
866    PCGAMGSetRepartitioning - Repartition the coarse grids
867 
868    Collective on PC
869 
870    Input Parameters:
871 +  pc - the preconditioner context
872 -  n - PETSC_TRUE or PETSC_FALSE
873 
874    Options Database Key:
875 .  -pc_gamg_repartition <true,false>
876 
877    Level: intermediate
878 
879    Concepts: Unstructured multigrid preconditioner
880 
881 .seealso: PCGAMGSetProcEqLim(), PCGAMGSetCoarseEqLim()
882 @*/
883 PetscErrorCode PCGAMGSetRepartitioning(PC pc, PetscBool n)
884 {
885   PetscErrorCode ierr;
886 
887   PetscFunctionBegin;
888   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
889   ierr = PetscTryMethod(pc,"PCGAMGSetRepartitioning_C",(PC,PetscBool),(pc,n));CHKERRQ(ierr);
890   PetscFunctionReturn(0);
891 }
892 
893 #undef __FUNCT__
894 #define __FUNCT__ "PCGAMGSetRepartitioning_GAMG"
895 static PetscErrorCode PCGAMGSetRepartitioning_GAMG(PC pc, PetscBool n)
896 {
897   PC_MG   *mg      = (PC_MG*)pc->data;
898   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
899 
900   PetscFunctionBegin;
901   pc_gamg->repart = n;
902   PetscFunctionReturn(0);
903 }
904 
905 #undef __FUNCT__
906 #define __FUNCT__ "PCGAMGSetReuseInterpolation"
907 /*@
908    PCGAMGSetReuseInterpolation - Reuse prolongation when rebuilding preconditioner
909 
910    Collective on PC
911 
912    Input Parameters:
913 +  pc - the preconditioner context
914 -  n - PETSC_TRUE or PETSC_FALSE
915 
916    Options Database Key:
917 .  -pc_gamg_reuse_interpolation <true,false>
918 
919    Level: intermediate
920 
921    Concepts: Unstructured multigrid preconditioner
922 
923 .seealso: PCGAMGSetRepartitioning()
924 @*/
925 PetscErrorCode PCGAMGSetReuseInterpolation(PC pc, PetscBool n)
926 {
927   PetscErrorCode ierr;
928 
929   PetscFunctionBegin;
930   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
931   ierr = PetscTryMethod(pc,"PCGAMGSetReuseInterpolation_C",(PC,PetscBool),(pc,n));CHKERRQ(ierr);
932   PetscFunctionReturn(0);
933 }
934 
935 #undef __FUNCT__
936 #define __FUNCT__ "PCGAMGSetReuseInterpolation_GAMG"
937 static PetscErrorCode PCGAMGSetReuseInterpolation_GAMG(PC pc, PetscBool n)
938 {
939   PC_MG   *mg      = (PC_MG*)pc->data;
940   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
941 
942   PetscFunctionBegin;
943   pc_gamg->reuse_prol = n;
944   PetscFunctionReturn(0);
945 }
946 
947 #undef __FUNCT__
948 #define __FUNCT__ "PCGAMGSetUseASMAggs"
949 /*@
950    PCGAMGSetUseASMAggs - Use the aggregates from the coarsening as the subdomains of the GASM smoother
951 
952    Collective on PC
953 
954    Input Parameters:
955 +  pc - the preconditioner context
956 -  n - PETSC_TRUE to use the aggregates as GASM subdomains, PETSC_FALSE otherwise
957 
958    Options Database Key:
959 .  -pc_gamg_use_agg_gasm
960 
961    Level: intermediate
962 
963    Concepts: Unstructured multigrid preconditioner
964 
965 .seealso: PCGASM
966 @*/
967 PetscErrorCode PCGAMGSetUseASMAggs(PC pc, PetscBool n)
968 {
969   PetscErrorCode ierr;
970 
971   PetscFunctionBegin;
972   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
973   ierr = PetscTryMethod(pc,"PCGAMGSetUseASMAggs_C",(PC,PetscBool),(pc,n));CHKERRQ(ierr);
974   PetscFunctionReturn(0);
975 }
976 
977 #undef __FUNCT__
978 #define __FUNCT__ "PCGAMGSetUseASMAggs_GAMG"
979 static PetscErrorCode PCGAMGSetUseASMAggs_GAMG(PC pc, PetscBool n)
980 {
981   PC_MG   *mg      = (PC_MG*)pc->data;
982   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
983 
984   PetscFunctionBegin;
985   pc_gamg->use_aggs_in_gasm = n;
986   PetscFunctionReturn(0);
987 }
988 
989 #undef __FUNCT__
990 #define __FUNCT__ "PCGAMGSetNlevels"
991 /*@
992    PCGAMGSetNlevels -  Sets the maximum number of levels PCGAMG will use
993 
994    Not collective on PC
995 
996    Input Parameters:
997 +  pc - the preconditioner
998 -  n - the maximum number of levels to use
999 
1000    Options Database Key:
1001 .  -pc_mg_levels
1002 
1003    Level: intermediate
1004 
1005    Concepts: Unstructured multigrid preconditioner
1006 
1007 .seealso: PCMGSetLevels()
1008 @*/
1009 PetscErrorCode PCGAMGSetNlevels(PC pc, PetscInt n)
1010 {
1011   PetscErrorCode ierr;
1012 
1013   PetscFunctionBegin;
1014   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1015   ierr = PetscTryMethod(pc,"PCGAMGSetNlevels_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr);
1016   PetscFunctionReturn(0);
1017 }
1018 
1019 #undef __FUNCT__
1020 #define __FUNCT__ "PCGAMGSetNlevels_GAMG"
1021 static PetscErrorCode PCGAMGSetNlevels_GAMG(PC pc, PetscInt n)
1022 {
1023   PC_MG   *mg      = (PC_MG*)pc->data;
1024   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
1025 
1026   PetscFunctionBegin;
1027   pc_gamg->Nlevels = n;
1028   PetscFunctionReturn(0);
1029 }
1030 
1031 #undef __FUNCT__
1032 #define __FUNCT__ "PCGAMGSetThreshold"
1033 /*@
1034    PCGAMGSetThreshold - Relative threshold to use for dropping edges in aggregation graph
1035 
1036    Not collective on PC
1037 
1038    Input Parameters:
1039 +  pc - the preconditioner context
1040 -  n - the threshold value; 0.0 means keep all nonzero entries in the graph; negative means keep even zero entries in the graph
1041 
1042    Options Database Key:
1043 .  -pc_gamg_threshold <threshold>
1044 
1045    Level: intermediate
1046 
1047    Concepts: Unstructured multigrid preconditioner
1048 
1049 .seealso: PCGAMGSetType()
1050 @*/
1051 PetscErrorCode PCGAMGSetThreshold(PC pc, PetscReal n)
1052 {
1053   PetscErrorCode ierr;
1054 
1055   PetscFunctionBegin;
1056   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1057   ierr = PetscTryMethod(pc,"PCGAMGSetThreshold_C",(PC,PetscReal),(pc,n));CHKERRQ(ierr);
1058   PetscFunctionReturn(0);
1059 }
1060 
1061 #undef __FUNCT__
1062 #define __FUNCT__ "PCGAMGSetThreshold_GAMG"
1063 static PetscErrorCode PCGAMGSetThreshold_GAMG(PC pc, PetscReal n)
1064 {
1065   PC_MG   *mg      = (PC_MG*)pc->data;
1066   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
1067 
1068   PetscFunctionBegin;
1069   pc_gamg->threshold = n;
1070   PetscFunctionReturn(0);
1071 }
1072 
1073 #undef __FUNCT__
1074 #define __FUNCT__ "PCGAMGSetType"
1075 /*@
1076    PCGAMGSetType - Set solution method
1077 
1078    Collective on PC
1079 
1080    Input Parameters:
1081 +  pc - the preconditioner context
1082 -  type - PCGAMGAGG, PCGAMGGEO, or PCGAMGCLASSICAL
1083 
1084    Options Database Key:
1085 .  -pc_gamg_type <agg,geo,classical>
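
   Example (a minimal sketch, equivalent to -pc_gamg_type agg):
$     ierr = PCGAMGSetType(pc,PCGAMGAGG);CHKERRQ(ierr);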
1086 
1087    Level: intermediate
1088 
1089    Concepts: Unstructured multigrid preconditioner
1090 
1091 .seealso: PCGAMGGetType(), PCGAMG
1092 @*/
1093 PetscErrorCode PCGAMGSetType(PC pc, PCGAMGType type)
1094 {
1095   PetscErrorCode ierr;
1096 
1097   PetscFunctionBegin;
1098   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1099   ierr = PetscTryMethod(pc,"PCGAMGSetType_C",(PC,PCGAMGType),(pc,type));CHKERRQ(ierr);
1100   PetscFunctionReturn(0);
1101 }
1102 
1103 #undef __FUNCT__
1104 #define __FUNCT__ "PCGAMGGetType"
1105 /*@
1106    PCGAMGGetType - Get solution method
1107 
1108    Collective on PC
1109 
1110    Input Parameter:
1111 .  pc - the preconditioner context
1112 
1113    Output Parameter:
1114 .  type - the type of algorithm used
1115 
1116    Level: intermediate
1117 
1118    Concepts: Unstructured multigrid preconditioner
1119 
1120 .seealso: PCGAMGSetType(), PCGAMGType
1121 @*/
1122 PetscErrorCode PCGAMGGetType(PC pc, PCGAMGType *type)
1123 {
1124   PetscErrorCode ierr;
1125 
1126   PetscFunctionBegin;
1127   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1128   ierr = PetscUseMethod(pc,"PCGAMGGetType_C",(PC,PCGAMGType*),(pc,type));CHKERRQ(ierr);
1129   PetscFunctionReturn(0);
1130 }
1131 
1132 #undef __FUNCT__
1133 #define __FUNCT__ "PCGAMGGetType_GAMG"
1134 static PetscErrorCode PCGAMGGetType_GAMG(PC pc, PCGAMGType *type)
1135 {
1136   PC_MG          *mg      = (PC_MG*)pc->data;
1137   PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
1138 
1139   PetscFunctionBegin;
1140   *type = pc_gamg->type;
1141   PetscFunctionReturn(0);
1142 }
1143 
1144 #undef __FUNCT__
1145 #define __FUNCT__ "PCGAMGSetType_GAMG"
1146 static PetscErrorCode PCGAMGSetType_GAMG(PC pc, PCGAMGType type)
1147 {
1148   PetscErrorCode ierr,(*r)(PC);
1149   PC_MG          *mg      = (PC_MG*)pc->data;
1150   PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
1151 
1152   PetscFunctionBegin;
1153   pc_gamg->type = type;
1154   ierr = PetscFunctionListFind(GAMGList,type,&r);CHKERRQ(ierr);
1155   if (!r) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_UNKNOWN_TYPE,"Unknown GAMG type %s given",type);
1156   if (pc_gamg->ops->destroy) {
1157     ierr = (*pc_gamg->ops->destroy)(pc);CHKERRQ(ierr);
1158     ierr = PetscMemzero(pc_gamg->ops,sizeof(struct _PCGAMGOps));CHKERRQ(ierr);
1159     pc_gamg->ops->createlevel = PCGAMGCreateLevel_GAMG;
1160     /* cleaning up common data in pc_gamg - this should disappear someday */
1161     pc_gamg->data_cell_cols = 0;
1162     pc_gamg->data_cell_rows = 0;
1163     pc_gamg->orig_data_cell_cols = 0;
1164     pc_gamg->orig_data_cell_rows = 0;
1165     ierr = PetscFree(pc_gamg->data);CHKERRQ(ierr);
1166     pc_gamg->data_sz = 0;
1167   }
1168   ierr = PetscFree(pc_gamg->gamg_type_name);CHKERRQ(ierr);
1169   ierr = PetscStrallocpy(type,&pc_gamg->gamg_type_name);CHKERRQ(ierr);
1170   ierr = (*r)(pc);CHKERRQ(ierr);
1171   PetscFunctionReturn(0);
1172 }
1173 
1174 #undef __FUNCT__
1175 #define __FUNCT__ "PCView_GAMG"
1176 static PetscErrorCode PCView_GAMG(PC pc,PetscViewer viewer)
1177 {
1178   PetscErrorCode ierr;
1179   PC_MG          *mg      = (PC_MG*)pc->data;
1180   PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
1181 
1182   PetscFunctionBegin;
1183   ierr = PetscViewerASCIIPrintf(viewer,"    GAMG specific options\n");CHKERRQ(ierr);
1184   ierr = PetscViewerASCIIPrintf(viewer,"      Threshold for dropping small values from graph %g\n",(double)pc_gamg->threshold);CHKERRQ(ierr);
1185   if (pc_gamg->ops->view) {
1186     ierr = (*pc_gamg->ops->view)(pc,viewer);CHKERRQ(ierr);
1187   }
1188   PetscFunctionReturn(0);
1189 }
1190 
1191 #undef __FUNCT__
1192 #define __FUNCT__ "PCSetFromOptions_GAMG"
1193 PetscErrorCode PCSetFromOptions_GAMG(PetscOptionItems *PetscOptionsObject,PC pc)
1194 {
1195   PetscErrorCode ierr;
1196   PC_MG          *mg      = (PC_MG*)pc->data;
1197   PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
1198   PetscBool      flag;
1199   MPI_Comm       comm;
1200   char           prefix[256];
1201   const char     *pcpre;
1202 
1203   PetscFunctionBegin;
1204   ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
1205   ierr = PetscOptionsHead(PetscOptionsObject,"GAMG options");CHKERRQ(ierr);
1206   {
1207     char tname[256];
1208     ierr = PetscOptionsFList("-pc_gamg_type","Type of AMG method","PCGAMGSetType",GAMGList, pc_gamg->gamg_type_name, tname, sizeof(tname), &flag);CHKERRQ(ierr);
1209     if (flag) {
1210       ierr = PCGAMGSetType(pc,tname);CHKERRQ(ierr);
1211     }
1212     ierr = PetscOptionsBool("-pc_gamg_repartition","Repartition coarse grids","PCGAMGSetRepartitioning",pc_gamg->repart,&pc_gamg->repart,NULL);CHKERRQ(ierr);
1213     ierr = PetscOptionsBool("-pc_gamg_reuse_interpolation","Reuse prolongation operator","PCGAMGSetReuseInterpolation",pc_gamg->reuse_prol,&pc_gamg->reuse_prol,NULL);CHKERRQ(ierr);
1214     ierr = PetscOptionsBool("-pc_gamg_use_agg_gasm","Use the aggregates from the coarsening as subdomains for the GASM smoother","PCGAMGSetUseASMAggs",pc_gamg->use_aggs_in_gasm,&pc_gamg->use_aggs_in_gasm,NULL);CHKERRQ(ierr);
1215     ierr = PetscOptionsInt("-pc_gamg_process_eq_limit","Limit (goal) on number of equations per process on coarse grids","PCGAMGSetProcEqLim",pc_gamg->min_eq_proc,&pc_gamg->min_eq_proc,NULL);CHKERRQ(ierr);
1216     ierr = PetscOptionsInt("-pc_gamg_coarse_eq_limit","Limit on number of equations for the coarse grid","PCGAMGSetCoarseEqLim",pc_gamg->coarse_eq_limit,&pc_gamg->coarse_eq_limit,NULL);CHKERRQ(ierr);
1217     ierr = PetscOptionsReal("-pc_gamg_threshold","Relative threshold to use for dropping edges in aggregation graph","PCGAMGSetThreshold",pc_gamg->threshold,&pc_gamg->threshold,&flag);CHKERRQ(ierr);
1218     ierr = PetscOptionsInt("-pc_mg_levels","Set number of MG levels","PCGAMGSetNlevels",pc_gamg->Nlevels,&pc_gamg->Nlevels,NULL);CHKERRQ(ierr);
1219 
1220     /* set options for subtype */
1221     if (pc_gamg->ops->setfromoptions) {ierr = (*pc_gamg->ops->setfromoptions)(PetscOptionsObject,pc);CHKERRQ(ierr);}
1222   }
1223   ierr = PCGetOptionsPrefix(pc, &pcpre);CHKERRQ(ierr);
1224   ierr = PetscSNPrintf(prefix,sizeof(prefix),"%spc_gamg_",pcpre ? pcpre : "");CHKERRQ(ierr);
1225   ierr = PetscObjectSetOptionsPrefix((PetscObject)pc_gamg->random,prefix);CHKERRQ(ierr);
1226   ierr = PetscRandomSetFromOptions(pc_gamg->random);CHKERRQ(ierr);
1227   ierr = PetscOptionsTail();CHKERRQ(ierr);
1228   PetscFunctionReturn(0);
1229 }
1230 
1231 /* -------------------------------------------------------------------------- */
1232 /*MC
1233      PCGAMG - Geometric algebraic multigrid (AMG) preconditioner
1234 
1235    Options Database Keys:
1236    Multigrid options (inherited):
1237 +  -pc_mg_cycles <v>: v or w (PCMGSetCycleType())
1238 .  -pc_mg_smoothup <1>: Number of post-smoothing steps (PCMGSetNumberSmoothUp())
1239 .  -pc_mg_smoothdown <1>: Number of pre-smoothing steps (PCMGSetNumberSmoothDown())
1240 -  -pc_mg_type <multiplicative>: (one of) additive multiplicative full kaskade
1241 
1242 
1243   Notes: In order to obtain good performance for PCGAMG for vector-valued problems you must
1244 $       Call MatSetBlockSize() to indicate the number of degrees of freedom per grid point
1245 $       Call MatSetNearNullSpace() (or PCSetCoordinates() if solving the equations of elasticity) to indicate the near null space of the operator
1246 $       See the Users Manual Chapter 4 for more details
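$       For example, a minimal sketch for a 3-component (e.g. 3D elasticity) problem, assuming 'A' is the
$       system matrix and 'coords' holds the nloc local nodal coordinates, might be
$          ierr = MatSetBlockSize(A,3);CHKERRQ(ierr);
$          ierr = PCSetType(pc,PCGAMG);CHKERRQ(ierr);
$          ierr = PCSetCoordinates(pc,3,nloc,coords);CHKERRQ(ierr);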
1247 
1248   Level: intermediate
1249 
1250   Concepts: algebraic multigrid
1251 
1252 .seealso:  PCCreate(), PCSetType(), MatSetBlockSize(), PCMGType, PCSetCoordinates(), MatSetNearNullSpace(), PCGAMGSetType(), PCGAMGAGG, PCGAMGGEO, PCGAMGCLASSICAL, PCGAMGSetProcEqLim(),
1253            PCGAMGSetCoarseEqLim(), PCGAMGSetRepartitioning(), PCGAMGRegister(), PCGAMGSetReuseInterpolation(), PCGAMGSetUseASMAggs(), PCGAMGSetNlevels(), PCGAMGSetThreshold(), PCGAMGGetType()
1254 M*/
1255 
1256 #undef __FUNCT__
1257 #define __FUNCT__ "PCCreate_GAMG"
1258 PETSC_EXTERN PetscErrorCode PCCreate_GAMG(PC pc)
1259 {
1260   PetscErrorCode ierr;
1261   PC_GAMG        *pc_gamg;
1262   PC_MG          *mg;
1263 
1264   PetscFunctionBegin;
1265   /* register AMG type */
1266   ierr = PCGAMGInitializePackage();CHKERRQ(ierr);
1267 
1268   /* PCGAMG is an inherited class of PCMG. Initialize pc as PCMG */
1269   ierr = PCSetType(pc, PCMG);CHKERRQ(ierr);
1270   ierr = PetscObjectChangeTypeName((PetscObject)pc, PCGAMG);CHKERRQ(ierr);
1271 
1272   /* create a supporting struct and attach it to pc */
1273   ierr         = PetscNewLog(pc,&pc_gamg);CHKERRQ(ierr);
1274   mg           = (PC_MG*)pc->data;
1275   mg->galerkin = 2;             /* Use Galerkin, but it is computed externally from PCMG by GAMG code */
1276   mg->innerctx = pc_gamg;
1277 
1278   ierr = PetscNewLog(pc,&pc_gamg->ops);CHKERRQ(ierr);
1279 
1280   pc_gamg->setup_count = 0;
1281   /* these should be in subctx but repartitioning needs simple arrays */
1282   pc_gamg->data_sz = 0;
1283   pc_gamg->data    = 0;
1284 
1285   /* overwrite the pointers of PCMG by the functions of the derived class PCGAMG */
1286   pc->ops->setfromoptions = PCSetFromOptions_GAMG;
1287   pc->ops->setup          = PCSetUp_GAMG;
1288   pc->ops->reset          = PCReset_GAMG;
1289   pc->ops->destroy        = PCDestroy_GAMG;
1290   mg->view                = PCView_GAMG;
1291 
1292   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetProcEqLim_C",PCGAMGSetProcEqLim_GAMG);CHKERRQ(ierr);
1293   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetCoarseEqLim_C",PCGAMGSetCoarseEqLim_GAMG);CHKERRQ(ierr);
1294   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetRepartitioning_C",PCGAMGSetRepartitioning_GAMG);CHKERRQ(ierr);
1295   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetReuseInterpolation_C",PCGAMGSetReuseInterpolation_GAMG);CHKERRQ(ierr);
1296   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetUseASMAggs_C",PCGAMGSetUseASMAggs_GAMG);CHKERRQ(ierr);
1297   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetThreshold_C",PCGAMGSetThreshold_GAMG);CHKERRQ(ierr);
1298   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetType_C",PCGAMGSetType_GAMG);CHKERRQ(ierr);
1299   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGGetType_C",PCGAMGGetType_GAMG);CHKERRQ(ierr);
1300   ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetNlevels_C",PCGAMGSetNlevels_GAMG);CHKERRQ(ierr);
1301   pc_gamg->repart           = PETSC_FALSE;
1302   pc_gamg->reuse_prol       = PETSC_FALSE;
1303   pc_gamg->use_aggs_in_gasm = PETSC_FALSE;
1304   pc_gamg->min_eq_proc      = 50;
1305   pc_gamg->coarse_eq_limit  = 50;
1306   pc_gamg->threshold        = 0.;
1307   pc_gamg->Nlevels          = GAMG_MAXLEVELS;
1308   pc_gamg->current_level    = 0; /* don't need to init really */
1309   pc_gamg->ops->createlevel = PCGAMGCreateLevel_GAMG;
1310 
1311   ierr = PetscRandomCreate(PetscObjectComm((PetscObject)pc),&pc_gamg->random);CHKERRQ(ierr);
1312 
1313   /* PCSetUp_GAMG assumes that the type has been set, so set it to the default now */
1314   ierr = PCGAMGSetType(pc,PCGAMGAGG);CHKERRQ(ierr);
1315   PetscFunctionReturn(0);
1316 }
1317 
1318 #undef __FUNCT__
1319 #define __FUNCT__ "PCGAMGInitializePackage"
1320 /*@C
1321  PCGAMGInitializePackage - This function initializes everything in the PCGAMG package. It is called
1322     from PetscDLLibraryRegister() when using dynamic libraries, and on the first call to PCCreate_GAMG()
1323     when using static libraries.
1324 
1325  Level: developer
1326 
1327  .keywords: PC, PCGAMG, initialize, package
1328  .seealso: PetscInitialize()
1329 @*/
1330 PetscErrorCode PCGAMGInitializePackage(void)
1331 {
1332   PetscErrorCode ierr;
1333 
1334   PetscFunctionBegin;
1335   if (PCGAMGPackageInitialized) PetscFunctionReturn(0);
1336   PCGAMGPackageInitialized = PETSC_TRUE;
1337   ierr = PetscFunctionListAdd(&GAMGList,PCGAMGGEO,PCCreateGAMG_GEO);CHKERRQ(ierr);
1338   ierr = PetscFunctionListAdd(&GAMGList,PCGAMGAGG,PCCreateGAMG_AGG);CHKERRQ(ierr);
1339   ierr = PetscFunctionListAdd(&GAMGList,PCGAMGCLASSICAL,PCCreateGAMG_Classical);CHKERRQ(ierr);
1340   ierr = PetscRegisterFinalize(PCGAMGFinalizePackage);CHKERRQ(ierr);
1341 
1342   /* general events */
1343   ierr = PetscLogEventRegister("PCGAMGGraph_AGG", PC_CLASSID, &PC_GAMGGraph_AGG);CHKERRQ(ierr);
1344   ierr = PetscLogEventRegister("PCGAMGGraph_GEO", PC_CLASSID, &PC_GAMGGraph_GEO);CHKERRQ(ierr);
1345   ierr = PetscLogEventRegister("PCGAMGCoarse_AGG", PC_CLASSID, &PC_GAMGCoarsen_AGG);CHKERRQ(ierr);
1346   ierr = PetscLogEventRegister("PCGAMGCoarse_GEO", PC_CLASSID, &PC_GAMGCoarsen_GEO);CHKERRQ(ierr);
1347   ierr = PetscLogEventRegister("PCGAMGProl_AGG", PC_CLASSID, &PC_GAMGProlongator_AGG);CHKERRQ(ierr);
1348   ierr = PetscLogEventRegister("PCGAMGProl_GEO", PC_CLASSID, &PC_GAMGProlongator_GEO);CHKERRQ(ierr);
1349   ierr = PetscLogEventRegister("PCGAMGPOpt_AGG", PC_CLASSID, &PC_GAMGOptProlongator_AGG);CHKERRQ(ierr);
1350 
1351 #if defined PETSC_GAMG_USE_LOG
1352   ierr = PetscLogEventRegister("GAMG: createProl", PC_CLASSID, &petsc_gamg_setup_events[SET1]);CHKERRQ(ierr);
1353   ierr = PetscLogEventRegister("  Graph", PC_CLASSID, &petsc_gamg_setup_events[GRAPH]);CHKERRQ(ierr);
1354   /* PetscLogEventRegister("    G.Mat", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_MAT]); */
1355   /* PetscLogEventRegister("    G.Filter", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_FILTER]); */
1356   /* PetscLogEventRegister("    G.Square", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_SQR]); */
1357   ierr = PetscLogEventRegister("  MIS/Agg", PC_CLASSID, &petsc_gamg_setup_events[SET4]);CHKERRQ(ierr);
1358   ierr = PetscLogEventRegister("  geo: growSupp", PC_CLASSID, &petsc_gamg_setup_events[SET5]);CHKERRQ(ierr);
1359   ierr = PetscLogEventRegister("  geo: triangle", PC_CLASSID, &petsc_gamg_setup_events[SET6]);CHKERRQ(ierr);
1360   ierr = PetscLogEventRegister("    search&set", PC_CLASSID, &petsc_gamg_setup_events[FIND_V]);CHKERRQ(ierr);
1361   ierr = PetscLogEventRegister("  SA: col data", PC_CLASSID, &petsc_gamg_setup_events[SET7]);CHKERRQ(ierr);
1362   ierr = PetscLogEventRegister("  SA: frmProl0", PC_CLASSID, &petsc_gamg_setup_events[SET8]);CHKERRQ(ierr);
1363   ierr = PetscLogEventRegister("  SA: smooth", PC_CLASSID, &petsc_gamg_setup_events[SET9]);CHKERRQ(ierr);
1364   ierr = PetscLogEventRegister("GAMG: partLevel", PC_CLASSID, &petsc_gamg_setup_events[SET2]);CHKERRQ(ierr);
1365   ierr = PetscLogEventRegister("  repartition", PC_CLASSID, &petsc_gamg_setup_events[SET12]);CHKERRQ(ierr);
1366   ierr = PetscLogEventRegister("  Invert-Sort", PC_CLASSID, &petsc_gamg_setup_events[SET13]);CHKERRQ(ierr);
1367   ierr = PetscLogEventRegister("  Move A", PC_CLASSID, &petsc_gamg_setup_events[SET14]);CHKERRQ(ierr);
1368   ierr = PetscLogEventRegister("  Move P", PC_CLASSID, &petsc_gamg_setup_events[SET15]);CHKERRQ(ierr);
1369 
1370   /* PetscLogEventRegister(" PL move data", PC_CLASSID, &petsc_gamg_setup_events[SET13]); */
1371   /* PetscLogEventRegister("GAMG: fix", PC_CLASSID, &petsc_gamg_setup_events[SET10]); */
1372   /* PetscLogEventRegister("GAMG: set levels", PC_CLASSID, &petsc_gamg_setup_events[SET11]); */
1373   /* create timer stages */
1374 #if defined GAMG_STAGES
1375   {
1376     char     str[32];
1377     PetscInt lidx;
1378     sprintf(str,"MG Level %d (finest)",0);
1379     ierr = PetscLogStageRegister(str, &gamg_stages[0]);CHKERRQ(ierr);
1380     for (lidx=1; lidx<9; lidx++) {
1381       sprintf(str,"MG Level %d",lidx);
1382       ierr = PetscLogStageRegister(str, &gamg_stages[lidx]);CHKERRQ(ierr);
1383     }
1384   }
1385 #endif
1386 #endif
1387   PetscFunctionReturn(0);
1388 }
1389 
1390 #undef __FUNCT__
1391 #define __FUNCT__ "PCGAMGFinalizePackage"
1392 /*@C
1393  PCGAMGFinalizePackage - This function frees everything from the PCGAMG package. It is
1394     called from PetscFinalize() automatically.
1395 
1396  Level: developer
1397 
1398  .keywords: Petsc, destroy, package
1399  .seealso: PetscFinalize()
1400 @*/
1401 PetscErrorCode PCGAMGFinalizePackage(void)
1402 {
1403   PetscErrorCode ierr;
1404 
1405   PetscFunctionBegin;
1406   PCGAMGPackageInitialized = PETSC_FALSE;
1407   ierr = PetscFunctionListDestroy(&GAMGList);CHKERRQ(ierr);
1408   PetscFunctionReturn(0);
1409 }
1410 
1411 #undef __FUNCT__
1412 #define __FUNCT__ "PCGAMGRegister"
1413 /*@C
1414  PCGAMGRegister - Register a PCGAMG implementation.
1415 
1416  Input Parameters:
1417  + type - string that will be used as the name of the GAMG type.
1418  - create - function for creating the gamg context.
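
 Example (a sketch; 'PCCreateGAMG_MyType' is a hypothetical user-supplied creation routine):
$   ierr = PCGAMGRegister("mytype", PCCreateGAMG_MyType);CHKERRQ(ierr);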
1419 
1420   Level: advanced
1421 
1422  .seealso: PCGAMGType, PCGAMG, PCGAMGSetType()
1423 @*/
1424 PetscErrorCode PCGAMGRegister(PCGAMGType type, PetscErrorCode (*create)(PC))
1425 {
1426   PetscErrorCode ierr;
1427 
1428   PetscFunctionBegin;
1429   ierr = PCGAMGInitializePackage();CHKERRQ(ierr);
1430   ierr = PetscFunctionListAdd(&GAMGList,type,create);CHKERRQ(ierr);
1431   PetscFunctionReturn(0);
1432 }
1433 
1434