xref: /petsc/src/ksp/pc/impls/gamg/gamg.c (revision ff218e97a57ed641f3ebc93f697e38ef0f3aa217)
1 /*
 GAMG geometric-algebraic multigrid PC - Mark Adams 2011
3  */
4 #include <../src/ksp/pc/impls/gamg/gamg.h>
5 
6 PetscLogEvent gamg_setup_stages[NUM_SET];
7 
/* Private context for the GAMG preconditioner (stored in PC_MG's innerctx) */
typedef struct gamg_TAG {
  PetscInt       m_dim;     /* spatial dimension of the vertex coordinates (2 or 3) */
  PetscInt       m_Nlevels; /* number of multigrid levels built by PCSetUp_GAMG; -1 until setup */
  PetscInt       m_data_sz; /* number of PetscReal entries in m_data (local nodes * m_dim) */
  PetscReal     *m_data; /* blocked vector of vertex data on fine grid (coordinates) */
} PC_GAMG;
15 
16 #define TOP_GRID_LIM 1000
17 
18 /* -----------------------------------------------------------------------------*/
19 #undef __FUNCT__
20 #define __FUNCT__ "PCReset_GAMG"
21 PetscErrorCode PCReset_GAMG(PC pc)
22 {
23   PetscErrorCode  ierr;
24   PC_MG           *mg = (PC_MG*)pc->data;
25   PC_GAMG         *pc_gamg = (PC_GAMG*)mg->innerctx;
26 
27   PetscFunctionBegin;
28   ierr = PetscFree(pc_gamg->m_data);CHKERRQ(ierr);
29   PetscFunctionReturn(0);
30 }
31 
32 /* -------------------------------------------------------------------------- */
33 /*
34    PCSetCoordinates_GAMG
35 
36    Input Parameter:
37    .  pc - the preconditioner context
38 */
39 EXTERN_C_BEGIN
40 #undef __FUNCT__
41 #define __FUNCT__ "PCSetCoordinates_GAMG"
42 PetscErrorCode PCSetCoordinates_GAMG(PC pc, const int ndm,PetscReal *coords )
43 {
44   PC_MG          *mg = (PC_MG*)pc->data;
45   PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
46   PetscErrorCode ierr;
47   PetscInt       bs, my0, tt;
48   Mat            mat = pc->pmat;
49   PetscInt       arrsz;
50 
51   PetscFunctionBegin;
52   ierr  = MatGetBlockSize( mat, &bs );               CHKERRQ( ierr );
53   ierr  = MatGetOwnershipRange( mat, &my0, &tt ); CHKERRQ(ierr);
54   arrsz = (tt-my0)/bs*ndm;
55 
56   // put coordinates
57   if (!pc_gamg->m_data || (pc_gamg->m_data_sz != arrsz)) {
58     ierr = PetscFree( pc_gamg->m_data );  CHKERRQ(ierr);
59     ierr = PetscMalloc(arrsz*sizeof(double), &pc_gamg->m_data ); CHKERRQ(ierr);
60   }
61 
62   /* copy data in */
63   for(tt=0;tt<arrsz;tt++){
64     pc_gamg->m_data[tt] = coords[tt];
65   }
66   pc_gamg->m_data_sz = arrsz;
67   pc_gamg->m_dim = ndm;
68 
69   PetscFunctionReturn(0);
70 }
71 EXTERN_C_END
72 
/* -------------------------------------------------------------------------- */
/*
   partitionLevel - Forms the Galerkin coarse operator (RAP) for the next level
   and repartitions it onto a possibly smaller set of active processes, moving
   the prolongator columns and the coordinate data to match the new layout.

   Input Parameter:
   . a_Amat_fine - matrix on this fine (k) level
   . a_dim - 2 or 3 (spatial dimension of the coordinate data)
   In/Output Parameter:
   . a_P_inout - prolongation operator to the next level (k-1); replaced by a
                 column-permuted copy matching the new partition
   . a_coarse_crds - coordinates that need to be moved (reallocated to the new
                     local size; old buffer is freed here)
   . a_active_proc - number of active procs (updated for the next level)
   Output Parameter:
   . a_Amat_crs - coarse matrix that is created (k-1)
*/

#undef __FUNCT__
#define __FUNCT__ "partitionLevel"
PetscErrorCode partitionLevel( Mat a_Amat_fine,
                               PetscInt a_dim,
                               Mat *a_P_inout,
                               PetscReal **a_coarse_crds,
                               PetscMPIInt *a_active_proc,
                               Mat *a_Amat_crs
                            )
{
  PetscErrorCode   ierr;
  Mat              Amat, Pnew, Pold = *a_P_inout;
  IS               new_indices,isnum;
  MPI_Comm         wcomm = ((PetscObject)a_Amat_fine)->comm;
  PetscMPIInt      nactive_procs,mype,npe;
  PetscInt         Istart,Iend,Istart0,Iend0,ncrs0,ncrs_new,bs=1; /* bs hardwired to 1 — GAMG only accepts scalar problems (see the bs check in PCSetUp_GAMG) */

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(wcomm,&mype);CHKERRQ(ierr);
  ierr = MPI_Comm_size(wcomm,&npe);CHKERRQ(ierr);
  /* RAP: Amat = Pold' * A_fine * Pold is the unpartitioned coarse operator */
  ierr = MatPtAP( a_Amat_fine, Pold, MAT_INITIAL_MATRIX, 2.0, &Amat ); CHKERRQ(ierr);
  ierr = MatGetOwnershipRange( Amat, &Istart0, &Iend0 );    CHKERRQ(ierr); /* x2 size of 'a_coarse_crds' */
  ncrs0 = (Iend0 - Istart0)/bs; /* current number of local coarse nodes */

  /* Repartition Amat_{k} and move columns of P^{k}_{k-1} and coordinates accordingly */
  {
    PetscInt        neq,N,counts[npe]; /* NOTE(review): VLA sized by comm size — large comms could stress the stack */
    IS              isnewproc;
    PetscMPIInt     new_npe,targ_npe;

    ierr = MatGetSize( Amat, &neq, &N );CHKERRQ(ierr);
#define MIN_EQ_PROC 100
    nactive_procs = *a_active_proc;
    targ_npe = neq/MIN_EQ_PROC; /* hardwire min. number of eq/proc */
    if( targ_npe == 0 || neq < TOP_GRID_LIM ) new_npe = 1; /* chop coarsest grid */
    else if (targ_npe >= nactive_procs ) new_npe = nactive_procs; /* no change */
    else {
      PetscMPIInt     factstart,fact;
      new_npe = -9999;
      factstart = nactive_procs;
      /* keep new_npe a divisor of nactive_procs so ranks map evenly; the loop
         runs to fact==1, so the last (smallest) qualifying divisor wins */
      for(fact=factstart;fact>0;fact--){ /* try to find a better number of procs */
        if( nactive_procs%fact==0 && neq/(nactive_procs/fact) > MIN_EQ_PROC ) {
          new_npe = nactive_procs/fact;
        }
      }
      assert(new_npe != -9999);
    }
    *a_active_proc = new_npe; /* output for next time */
PetscPrintf(PETSC_COMM_WORLD,"\t\t%s num active procs = %d (N loc = %d)\n",__FUNCT__,new_npe,ncrs0);
    { /* partition: get 'isnewproc' — the target rank of each local coarse node */
      MatPartitioning  mpart;
      Mat              adj;
      const PetscInt  *is_idx;
      PetscInt         is_sz,kk,jj,old_fact=(npe/nactive_procs),new_fact=(npe/new_npe),*isnewproc_idx;
      /* create sub communicator of the currently-active ranks (every old_fact-th rank) */
      MPI_Comm cm,new_comm;
      int membershipKey = mype % old_fact; /* 0 == rank participates in partitioning */
      ierr = MPI_Comm_split(wcomm, membershipKey, mype, &cm); CHKERRQ(ierr);
      ierr = PetscCommDuplicate( cm, &new_comm, PETSC_NULL ); CHKERRQ(ierr);
      ierr = MPI_Comm_free( &cm );                            CHKERRQ(ierr);

      /* MatPartitioningApply call MatConvert, which is collective */
      ierr = MatConvert(Amat,MATMPIADJ,MAT_INITIAL_MATRIX,&adj);CHKERRQ(ierr);
      if( membershipKey == 0 ) {
        /* hack to fix global data that pmetis.c uses in 'adj': compress the
           row ranges so the active ranks appear contiguous on new_comm */
        for( kk=0 , jj=0 ; kk<=npe ; jj++, kk += old_fact ) {
          adj->rmap->range[jj] = adj->rmap->range[kk];
        }
        ierr = MatPartitioningCreate( new_comm, &mpart ); CHKERRQ(ierr);
        ierr = MatPartitioningSetAdjacency( mpart, adj ); CHKERRQ(ierr);
        ierr = MatPartitioningSetFromOptions( mpart ); CHKERRQ(ierr);
        ierr = MatPartitioningSetNParts( mpart, new_npe ); CHKERRQ(ierr);
        ierr = MatPartitioningApply( mpart, &isnewproc ); CHKERRQ(ierr);
        ierr = MatPartitioningDestroy( &mpart ); CHKERRQ(ierr);
        /* collect IS info */
        ierr = ISGetLocalSize( isnewproc, &is_sz );        CHKERRQ(ierr);
        ierr = PetscMalloc( is_sz*sizeof(PetscInt), &isnewproc_idx ); CHKERRQ(ierr);
        ierr = ISGetIndices( isnewproc, &is_idx );     CHKERRQ(ierr);
        for(kk=0;kk<is_sz;kk++){
          /* spread partitioning across machine - probably the right thing to do but machine specific */
          isnewproc_idx[kk] = is_idx[kk] * new_fact;
        }
        ierr = ISRestoreIndices( isnewproc, &is_idx );     CHKERRQ(ierr);
        ierr = ISDestroy( &isnewproc );                    CHKERRQ(ierr);
      }
      else {
        /* inactive ranks contribute an empty slice to the collective ISCreateGeneral */
        isnewproc_idx = 0;
        is_sz = 0;
      }
      ierr = MatDestroy( &adj );                       CHKERRQ(ierr);
      ierr = MPI_Comm_free( &new_comm );    CHKERRQ(ierr);
      /* NOTE(review): return code of ISCreateGeneral is not CHKERRQ-checked here — confirm intent */
      ierr = ISCreateGeneral( wcomm, is_sz, isnewproc_idx, PETSC_COPY_VALUES, &isnewproc );
      if( membershipKey == 0 ) {
        ierr = PetscFree( isnewproc_idx );  CHKERRQ(ierr);
      }
    }

    /*
     Create an index set from the isnewproc index set to indicate the mapping TO
     */
    ierr = ISPartitioningToNumbering( isnewproc, &isnum ); CHKERRQ(ierr);
    /*
     Determine how many elements are assigned to each processor
     */
    ierr = ISPartitioningCount( isnewproc, npe, counts ); CHKERRQ(ierr);
    ierr = ISDestroy( &isnewproc );                       CHKERRQ(ierr);
    ncrs_new = counts[mype]; /* number of local coarse nodes after repartitioning */
  }

  { /* Create a vector to contain the newly ordered element information */
    const PetscInt *idx;
    PetscInt        i,j,k;
    IS              isscat;
    PetscScalar    *array;
    Vec             src_crd, dest_crd;
    PetscReal      *coords = *a_coarse_crds;
    VecScatter      vecscat;

    /* NOTE(review): return code of VecCreate is not CHKERRQ-checked — confirm intent */
    ierr = VecCreate( wcomm, &dest_crd );
    ierr = VecSetSizes( dest_crd, a_dim*ncrs_new, PETSC_DECIDE ); CHKERRQ(ierr);
    ierr = VecSetFromOptions( dest_crd ); CHKERRQ(ierr); /*funny vector-get global options?*/
    /*
     There are a_dim data items per node, the coordinates (we convert to
     PetscScalar to use the scatter) (one can think of the vectors as having a
     block size of a_dim, then there is one index in idx[] for each node)
     */
    {
      PetscInt tidx[ncrs0*a_dim]; /* VLA: one scatter index per coordinate component */
      ierr = ISGetIndices( isnum, &idx ); CHKERRQ(ierr);
      for (i=0,j=0; i<ncrs0; i++) {
        for (k=0; k<a_dim; k++) tidx[j++] = idx[i]*a_dim + k;
      }
      ierr = ISCreateGeneral( wcomm, a_dim*ncrs0, tidx, PETSC_COPY_VALUES, &isscat );
      CHKERRQ(ierr);
      ierr = ISRestoreIndices( isnum, &idx ); CHKERRQ(ierr);
    }
    /*
     Create a vector to contain the original vertex information for each element
     */
    ierr = VecCreateSeq( PETSC_COMM_SELF, a_dim*ncrs0, &src_crd ); CHKERRQ(ierr);
    ierr = VecGetArray( src_crd, &array );                        CHKERRQ(ierr);
    for (i=0; i<a_dim*ncrs0; i++) array[i] = coords[i];
    ierr = VecRestoreArray( src_crd, &array );                     CHKERRQ(ierr);
    /*
     Scatter the element vertex information (still in the original vertex ordering)
     to the correct processor
     */
    ierr = VecScatterCreate( src_crd, PETSC_NULL, dest_crd, isscat, &vecscat);
    CHKERRQ(ierr);
    ierr = ISDestroy( &isscat );       CHKERRQ(ierr);
    ierr = VecScatterBegin(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterDestroy( &vecscat );       CHKERRQ(ierr);
    ierr = VecDestroy( &src_crd );       CHKERRQ(ierr);
    /*
     Put the scattered coordinate data into a new allocation sized for the new
     local partition (the old buffer is freed; caller's pointer is updated)
     */
    ierr = PetscFree( *a_coarse_crds );    CHKERRQ(ierr);
    ierr = PetscMalloc( a_dim*ncrs_new*sizeof(PetscReal), a_coarse_crds );    CHKERRQ(ierr);
    coords = *a_coarse_crds; /* convenient alias for the new buffer */
    ierr = VecGetArray( dest_crd, &array );    CHKERRQ(ierr);
    for (i=0; i<a_dim*ncrs_new; i++) coords[i] = PetscRealPart(array[i]);
    ierr = VecRestoreArray( dest_crd, &array );    CHKERRQ(ierr);
    ierr = VecDestroy( &dest_crd );    CHKERRQ(ierr);
  }
  /*
   Invert for MatGetSubMatrix
   */
  ierr = ISInvertPermutation( isnum, ncrs_new, &new_indices ); CHKERRQ(ierr);
  ierr = ISSort( new_indices ); CHKERRQ(ierr); /* is this needed? */
  ierr = ISDestroy( &isnum ); CHKERRQ(ierr);
  /* A_crs output: extract the repartitioned coarse operator */
  ierr = MatGetSubMatrix( Amat, new_indices, new_indices, MAT_INITIAL_MATRIX, a_Amat_crs );
  CHKERRQ(ierr);
  ierr = MatDestroy( &Amat ); CHKERRQ(ierr);
  Amat = *a_Amat_crs;
  /* prolongator: keep the fine rows, permute the columns to the new coarse layout */
  ierr = MatGetOwnershipRange( Pold, &Istart, &Iend );    CHKERRQ(ierr);
  {
    IS findices;
    ierr = ISCreateStride(wcomm,Iend-Istart,Istart,1,&findices);   CHKERRQ(ierr);
    ierr = MatGetSubMatrix( Pold, findices, new_indices, MAT_INITIAL_MATRIX, &Pnew );
    CHKERRQ(ierr);
    ierr = ISDestroy( &findices ); CHKERRQ(ierr);
  }
  ierr = MatDestroy( a_P_inout ); CHKERRQ(ierr);
  *a_P_inout = Pnew; /* output */

  ierr = ISDestroy( &new_indices ); CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
281 
#define GAMG_MAXLEVELS 20
#if defined(PETSC_USE_LOG)
/* size with GAMG_MAXLEVELS so the array cannot fall out of sync with the level limit */
PetscLogStage  gamg_stages[GAMG_MAXLEVELS];
#endif
/* -------------------------------------------------------------------------- */
/*
   PCSetUp_GAMG - Prepares for the use of the GAMG preconditioner
                    by setting data structures and options: builds the level
                    hierarchy (prolongators + Galerkin coarse operators) from
                    the fine-grid matrix and the coordinates supplied via
                    PCSetCoordinates, then configures the underlying PCMG.

   Input Parameter:
.  pc - the preconditioner context

   Application Interface Routine: PCSetUp()

   Notes:
   The interface routine PCSetUp() is not usually called directly by
   the user, but instead is called by PCApply() if necessary.
*/
#undef __FUNCT__
#define __FUNCT__ "PCSetUp_GAMG"
PetscErrorCode PCSetUp_GAMG( PC pc )
{
  PetscErrorCode  ierr;
  PC_MG           *mg = (PC_MG*)pc->data;
  PC_GAMG         *pc_gamg = (PC_GAMG*)mg->innerctx;
  Mat              Amat = pc->mat, Pmat = pc->pmat;
  PetscBool        isSeq, isMPI;
  PetscInt         fine_level, level, level1, M, N, bs, lidx;
  MPI_Comm         wcomm = ((PetscObject)pc)->comm;
  PetscMPIInt      mype,npe,nactivepe;
  PetscBool isOK;

  /* levels are built fine-to-coarse in Aarr/Parr[0..Nlevels-1]; crds holds the
     coordinate buffer currently owned by this routine (taken from pc_gamg) */
  Mat Aarr[GAMG_MAXLEVELS], Parr[GAMG_MAXLEVELS];  PetscReal *coarse_crds = 0, *crds = pc_gamg->m_data;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(wcomm,&mype);CHKERRQ(ierr);
  ierr = MPI_Comm_size(wcomm,&npe);CHKERRQ(ierr);
  if (pc->setupcalled){
    /* no state data in GAMG to destroy (now) */
    ierr = PCReset_MG(pc); CHKERRQ(ierr);
  }
  if (!pc_gamg->m_data) SETERRQ(wcomm,PETSC_ERR_SUP,"PCSetUp_GAMG called before PCSetCoordinates");
  /* setup special features of PCGAMG: only AIJ matrices are supported */
  ierr = PetscTypeCompare((PetscObject)Amat, MATSEQAIJ, &isSeq);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)Amat, MATMPIAIJ, &isMPI);CHKERRQ(ierr);
  if (isMPI) {
  } else if (isSeq) {
  } else SETERRQ1(wcomm,PETSC_ERR_ARG_WRONG, "Matrix type '%s' cannot be used with GAMG. GAMG can only handle AIJ matrices.",((PetscObject)Amat)->type_name);

  /* GAMG requires input of fine-grid matrix. It determines nlevels. */
  ierr = MatGetBlockSize( Amat, &bs ); CHKERRQ(ierr);
  if(bs!=1) SETERRQ1(wcomm,PETSC_ERR_ARG_WRONG, "GAMG only supports scalar prblems bs = '%d'.",bs);

  /* Get A_i and R_i: coarsen until the grid is small enough or the level cap is hit */
  ierr = MatGetSize( Amat, &M, &N );CHKERRQ(ierr);
PetscPrintf(PETSC_COMM_WORLD,"[%d]%s level %d N=%d\n",0,__FUNCT__,0,N);
  for (level=0, Aarr[0] = Pmat, nactivepe = npe;
       level < GAMG_MAXLEVELS-1 && (level==0 || M>TOP_GRID_LIM); /* hard wired stopping logic */
       level++ ) {
    level1 = level + 1;
    ierr = PetscLogEventBegin(gamg_setup_stages[SET1],0,0,0,0);CHKERRQ(ierr);
    ierr = createProlongation( Aarr[level], crds, pc_gamg->m_dim,
                               &Parr[level1], &coarse_crds, &isOK );
    CHKERRQ(ierr);
    ierr = PetscLogEventEnd(gamg_setup_stages[SET1],0,0,0,0);CHKERRQ(ierr);

    /* this level's input coordinates are consumed; on the first pass this
       frees pc_gamg->m_data (the pointer is zeroed after the loop) */
    ierr = PetscFree( crds ); CHKERRQ( ierr );
    if(level==0) Aarr[0] = Amat; /* use Pmat for finest level setup, but use mat for solver */
    if( isOK ) {
      ierr = PetscLogEventBegin(gamg_setup_stages[SET2],0,0,0,0);CHKERRQ(ierr);
      ierr = partitionLevel( Aarr[level], pc_gamg->m_dim, &Parr[level1], &coarse_crds, &nactivepe, &Aarr[level1] );
      CHKERRQ(ierr);
      ierr = PetscLogEventEnd(gamg_setup_stages[SET2],0,0,0,0);CHKERRQ(ierr);
      ierr = MatGetSize( Aarr[level1], &M, &N );CHKERRQ(ierr);
PetscPrintf(PETSC_COMM_WORLD,"[%d]%s done level %d N=%d, active pe = %d\n",0,__FUNCT__,level1,N,nactivepe);
    }
    else{
      /* createProlongation declined to coarsen further — stop here */
      break;
    }
    crds = coarse_crds; /* hand ownership of the new coordinates to the next iteration */
  }
  ierr = PetscFree( coarse_crds ); CHKERRQ( ierr );
PetscPrintf(PETSC_COMM_WORLD,"\t[%d]%s %d levels\n",0,__FUNCT__,level + 1);
  pc_gamg->m_data = 0; /* destroyed coordinate data */
  pc_gamg->m_Nlevels = level + 1;
  fine_level = level; /* GAMG counts fine-to-coarse; PCMG counts coarse-to-fine */
  ierr = PCMGSetLevels(pc,pc_gamg->m_Nlevels,PETSC_NULL);CHKERRQ(ierr);

  /* set default smoothers: Chebychev/Jacobi on all but the coarsest level */
  PetscReal emax = 2.0, emin;
  for (level=1; level<=fine_level; level++){
    KSP smoother; PC subpc;
    ierr = PCMGGetSmoother(pc,level,&smoother);CHKERRQ(ierr);
    ierr = KSPSetType(smoother,KSPCHEBYCHEV);CHKERRQ(ierr);
    emin = emax/10.0; /* fix!!! hard-coded eigenvalue bounds, no estimation yet */
    ierr = KSPGetPC(smoother,&subpc);CHKERRQ(ierr);
    ierr = PCSetType(subpc,PCJACOBI);CHKERRQ(ierr);
    ierr = KSPChebychevSetEigenvalues(smoother, emax, emin);CHKERRQ(ierr); /* need auto !!!!*/
  }
  ierr = PCSetFromOptions_MG(pc); CHKERRQ(ierr); /* should be called in PCSetFromOptions_GAMG(), but cannot be called prior to PCMGSetLevels() */
  {
    PetscBool galerkin;
    ierr = PCMGGetGalerkin( pc,  &galerkin); CHKERRQ(ierr);
    if(galerkin){
      SETERRQ(wcomm,PETSC_ERR_ARG_WRONG, "GAMG does galerkin manually so it must not be used in PC_MG.");
    }
  }
  {
    char str[32];
    sprintf(str,"MG Level %d (%d)",0,pc_gamg->m_Nlevels-1);
    PetscLogStageRegister(str, &gamg_stages[fine_level]);
  }
  /* create coarse level and the interpolation between the levels;
     lidx walks Aarr/Parr coarse-to-fine while level walks PCMG fine-to-coarse */
  for (level=0,lidx=pc_gamg->m_Nlevels-1; level<fine_level; level++,lidx--){
    level1 = level + 1;
    ierr = PCMGSetInterpolation(pc,level1,Parr[lidx]);CHKERRQ(ierr);
    if( !PETSC_TRUE ) { /* NOTE(review): deliberately dead debug branch (Matlab dump of Aarr[lidx]) */
      PetscViewer viewer; char fname[32];
      sprintf(fname,"Amat_%d.m",lidx);
      ierr = PetscViewerASCIIOpen( wcomm, fname, &viewer );  CHKERRQ(ierr);
      ierr = PetscViewerSetFormat( viewer, PETSC_VIEWER_ASCII_MATLAB);  CHKERRQ(ierr);
      ierr = MatView( Aarr[lidx], viewer ); CHKERRQ(ierr);
      ierr = PetscViewerDestroy( &viewer );
    }
    ierr = MatDestroy( &Parr[lidx] );  CHKERRQ(ierr);
    {
      KSP smoother;
      ierr = PCMGGetSmoother(pc,level,&smoother); CHKERRQ(ierr);
      ierr = KSPSetOperators( smoother, Aarr[lidx], Aarr[lidx], DIFFERENT_NONZERO_PATTERN );
      CHKERRQ(ierr);
      ierr = KSPSetNormType( smoother, KSP_NORM_NONE ); CHKERRQ(ierr);
    }
    ierr = MatDestroy( &Aarr[lidx] );  CHKERRQ(ierr); /* KSP holds its own reference */
    {
      char str[32];
      sprintf(str,"MG Level %d (%d)",level+1,lidx-1);
      PetscLogStageRegister(str, &gamg_stages[lidx-1]);
    }
  }
  { /* fine level (no P) */
    KSP smoother;
    ierr = PCMGGetSmoother(pc,fine_level,&smoother); CHKERRQ(ierr);
    ierr = KSPSetOperators( smoother, Aarr[0], Aarr[0], DIFFERENT_NONZERO_PATTERN );
    CHKERRQ(ierr);
    ierr = KSPSetNormType( smoother, KSP_NORM_NONE ); CHKERRQ(ierr);
  }
  /* setupcalled is set to 0 so that MG is setup from scratch */
  pc->setupcalled = 0;
  ierr = PCSetUp_MG(pc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
433 
434 /* -------------------------------------------------------------------------- */
435 /*
436    PCDestroy_GAMG - Destroys the private context for the GAMG preconditioner
437    that was created with PCCreate_GAMG().
438 
439    Input Parameter:
440 .  pc - the preconditioner context
441 
442    Application Interface Routine: PCDestroy()
443 */
444 #undef __FUNCT__
445 #define __FUNCT__ "PCDestroy_GAMG"
446 PetscErrorCode PCDestroy_GAMG(PC pc)
447 {
448   PetscErrorCode  ierr;
449   PC_MG           *mg = (PC_MG*)pc->data;
450   PC_GAMG         *pc_gamg= (PC_GAMG*)mg->innerctx;
451 
452   PetscFunctionBegin;
453   ierr = PCReset_GAMG(pc);CHKERRQ(ierr);
454   ierr = PetscFree(pc_gamg);CHKERRQ(ierr);
455   ierr = PCDestroy_MG(pc);CHKERRQ(ierr);
456   PetscFunctionReturn(0);
457 }
458 
459 #undef __FUNCT__
460 #define __FUNCT__ "PCSetFromOptions_GAMG"
461 PetscErrorCode PCSetFromOptions_GAMG(PC pc)
462 {
463   /* PetscErrorCode  ierr; */
464   /* PC_MG           *mg = (PC_MG*)pc->data; */
465   /* PC_GAMG         *pc_gamg = (PC_GAMG*)mg->innerctx; */
466   /* MPI_Comm        comm = ((PetscObject)pc)->comm; */
467 
468   PetscFunctionBegin;
469   PetscFunctionReturn(0);
470 }
471 
472 /* -------------------------------------------------------------------------- */
473 /*
474  PCCreate_GAMG - Creates a GAMG preconditioner context, PC_GAMG
475 
476    Input Parameter:
477 .  pc - the preconditioner context
478 
479    Application Interface Routine: PCCreate()
480 
481   */
482  /* MC
483      PCGAMG - Use algebraic multigrid preconditioning. This preconditioner requires you provide
484        fine grid discretization matrix and coordinates on the fine grid.
485 
486    Options Database Key:
487    Multigrid options(inherited)
488 +  -pc_mg_cycles <1>: 1 for V cycle, 2 for W-cycle (MGSetCycles)
489 .  -pc_mg_smoothup <1>: Number of post-smoothing steps (MGSetNumberSmoothUp)
490 .  -pc_mg_smoothdown <1>: Number of pre-smoothing steps (MGSetNumberSmoothDown)
491    -pc_mg_type <multiplicative>: (one of) additive multiplicative full cascade kascade
492    GAMG options:
493 
494    Level: intermediate
495   Concepts: multigrid
496 
497 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC, PCMGType,
           PCMGSetLevels(), PCMGGetLevels(), PCMGSetType(), PCMGSetCycles(), PCMGSetNumberSmoothDown(),
499            PCMGSetNumberSmoothUp(), PCMGGetCoarseSolve(), PCMGSetResidual(), PCMGSetInterpolation(),
500            PCMGSetRestriction(), PCMGGetSmoother(), PCMGGetSmootherUp(), PCMGGetSmootherDown(),
501            PCMGSetCyclesOnLevel(), PCMGSetRhs(), PCMGSetX(), PCMGSetR()
502 M */
503 
504 EXTERN_C_BEGIN
505 #undef __FUNCT__
506 #define __FUNCT__ "PCCreate_GAMG"
507 PetscErrorCode  PCCreate_GAMG(PC pc)
508 {
509   PetscErrorCode  ierr;
510   PC_GAMG         *pc_gamg;
511   PC_MG           *mg;
512   PetscClassId     cookie;
513 
514   PetscFunctionBegin;
515   /* PCGAMG is an inherited class of PCMG. Initialize pc as PCMG */
516   ierr = PCSetType(pc,PCMG);CHKERRQ(ierr); /* calls PCCreate_MG() and MGCreate_Private() */
517   ierr = PetscObjectChangeTypeName((PetscObject)pc,PCGAMG);CHKERRQ(ierr);
518 
519   /* create a supporting struct and attach it to pc */
520   ierr = PetscNewLog(pc,PC_GAMG,&pc_gamg);CHKERRQ(ierr);
521   mg = (PC_MG*)pc->data;
522   mg->innerctx = pc_gamg;
523 
524   pc_gamg->m_Nlevels    = -1;
525 
526   /* overwrite the pointers of PCMG by the functions of PCGAMG */
527   pc->ops->setfromoptions = PCSetFromOptions_GAMG;
528   pc->ops->setup          = PCSetUp_GAMG;
529   pc->ops->reset          = PCReset_GAMG;
530   pc->ops->destroy        = PCDestroy_GAMG;
531 
532   ierr = PetscObjectComposeFunctionDynamic( (PetscObject)pc,
533 					    "PCSetCoordinates_C",
534 					    "PCSetCoordinates_GAMG",
535 					    PCSetCoordinates_GAMG);CHKERRQ(ierr);
536 
537   PetscClassIdRegister("GAMG Setup",&cookie);
538   PetscLogEventRegister("GAMG-createProl", cookie, &gamg_setup_stages[SET1]);
539   PetscLogEventRegister("GAMG-partLevel", cookie, &gamg_setup_stages[SET2]);
540   PetscLogEventRegister("GAMG-MIS Graph", cookie, &gamg_setup_stages[SET3]);
541   PetscLogEventRegister("GAMG-MIS-Agg", cookie, &gamg_setup_stages[SET4]);
542   PetscLogEventRegister("GAMG-growSupp", cookie, &gamg_setup_stages[SET5]);
543   PetscLogEventRegister("GAMG-tri-Prol", cookie, &gamg_setup_stages[SET6]);
544   PetscLogEventRegister("GAMG-find-prol", cookie, &gamg_setup_stages[FIND_V]);
545 
546   PetscFunctionReturn(0);
547 }
548 EXTERN_C_END
549