xref: /petsc/src/ksp/pc/impls/gamg/gamg.c (revision bbead8a294046a48bbbe8fbcc40ad4dafe54ecae)
/*
 GAMG geometric-algebraic multigrid PC - Mark Adams 2011
 */
4 #include <../src/ksp/pc/impls/gamg/gamg.h>
5 
6 PetscLogEvent gamg_setup_stages[NUM_SET];
7 
/* Private context for the GAMG preconditioner */
typedef struct gamg_TAG {
  PetscInt       m_dim;      /* spatial dimension of the vertex coordinates (set by PCSetCoordinates_GAMG) */
  PetscInt       m_Nlevels;  /* number of multigrid levels built; -1 until PCSetUp_GAMG runs */
  PetscInt       m_data_sz;  /* number of PetscReal entries in m_data (= local-rows/bs * m_dim) */
  PetscReal     *m_data; /* blocked vector of vertex data on fine grid (coordinates) */
} PC_GAMG;
15 
16 #define TOP_GRID_LIM 100
17 
18 /* -----------------------------------------------------------------------------*/
19 #undef __FUNCT__
20 #define __FUNCT__ "PCReset_GAMG"
21 PetscErrorCode PCReset_GAMG(PC pc)
22 {
23   PetscErrorCode  ierr;
24   PC_MG           *mg = (PC_MG*)pc->data;
25   PC_GAMG         *pc_gamg = (PC_GAMG*)mg->innerctx;
26 
27   PetscFunctionBegin;
28   ierr = PetscFree(pc_gamg->m_data);CHKERRQ(ierr);
29   PetscFunctionReturn(0);
30 }
31 
32 /* -------------------------------------------------------------------------- */
33 /*
34    PCSetCoordinates_GAMG
35 
36    Input Parameter:
37    .  pc - the preconditioner context
38 */
39 EXTERN_C_BEGIN
40 #undef __FUNCT__
41 #define __FUNCT__ "PCSetCoordinates_GAMG"
42 PetscErrorCode PCSetCoordinates_GAMG(PC pc, const int ndm,PetscReal *coords )
43 {
44   PC_MG          *mg = (PC_MG*)pc->data;
45   PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
46   PetscErrorCode ierr;
47   PetscInt       bs, my0, tt;
48   Mat            mat = pc->pmat;
49   PetscInt       arrsz;
50 
51   PetscFunctionBegin;
52   ierr  = MatGetBlockSize( mat, &bs );               CHKERRQ( ierr );
53   ierr  = MatGetOwnershipRange( mat, &my0, &tt ); CHKERRQ(ierr);
54   arrsz = (tt-my0)/bs*ndm;
55 
56   // put coordinates
57   if (!pc_gamg->m_data || (pc_gamg->m_data_sz != arrsz)) {
58     ierr = PetscFree( pc_gamg->m_data );  CHKERRQ(ierr);
59     ierr = PetscMalloc(arrsz*sizeof(double), &pc_gamg->m_data ); CHKERRQ(ierr);
60   }
61 
62   /* copy data in */
63   for(tt=0;tt<arrsz;tt++){
64     pc_gamg->m_data[tt] = coords[tt];
65   }
66   pc_gamg->m_data_sz = arrsz;
67   pc_gamg->m_dim = ndm;
68 
69   PetscFunctionReturn(0);
70 }
71 EXTERN_C_END
72 
/* -------------------------------------------------------------------------- */
/*
   partitionLevel

   Input Parameter:
   . a_Amat_fine - matrix on this fine (k) level
   . a_dim - spatial dimension (2 or 3)
   In/Output Parameter:
   . a_P_inout - prolongation operator to the next level (k-1)
   . a_coarse_crds - coordinates that need to be moved
   . a_active_proc - number of active procs
   Output Parameter:
   . a_Amat_crs - coarse matrix that is created (k-1)
*/
87 
#undef __FUNCT__
#define __FUNCT__ "partitionLevel"
/* Forms the coarse operator A_{k-1} = P^T A_k P, repartitions it (possibly
   onto fewer MPI processes when the grid gets small), and permutes both the
   prolongator columns and the coarse-grid coordinates to match the new
   distribution.  Collective on the communicator of a_Amat_fine. */
PetscErrorCode partitionLevel( Mat a_Amat_fine,
                               PetscInt a_dim,
                               Mat *a_P_inout,
                               PetscReal **a_coarse_crds,
                               PetscMPIInt *a_active_proc,
                               Mat *a_Amat_crs
                            )
{
  PetscErrorCode   ierr;
  Mat              Amat, Pnew, Pold = *a_P_inout;
  IS               new_indices,isnum;
  MPI_Comm         wcomm = ((PetscObject)a_Amat_fine)->comm;
  PetscMPIInt      nactive_procs,mype,npe;
  PetscInt         Istart,Iend,Istart0,Iend0,ncrs0,ncrs_new,bs=1; /* bs ??? scalar problems only -- see PCSetUp_GAMG's bs==1 check */

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(wcomm,&mype);CHKERRQ(ierr);
  ierr = MPI_Comm_size(wcomm,&npe);CHKERRQ(ierr);
  /* RAP: form the (pre-repartitioning) coarse operator */
  ierr = MatPtAP( a_Amat_fine, Pold, MAT_INITIAL_MATRIX, 2.0, &Amat ); CHKERRQ(ierr);
  ierr = MatGetOwnershipRange( Amat, &Istart0, &Iend0 );    CHKERRQ(ierr); /* x2 size of 'a_coarse_crds' */
  ncrs0 = (Iend0 - Istart0)/bs; /* number of local coarse vertices before repartitioning */

  /* Repartition Amat_{k} and move colums of P^{k}_{k-1} and coordinates accordingly */
  {
    PetscInt        neq,N,counts[npe]; /* NOTE(review): VLA sized by npe -- large communicators could overflow the stack; consider PetscMalloc */
    IS              isnewproc;
    PetscMPIInt     new_npe,targ_npe;

    ierr = MatGetSize( Amat, &neq, &N );CHKERRQ(ierr);
#define MIN_EQ_PROC 100
    nactive_procs = *a_active_proc;
    targ_npe = neq/MIN_EQ_PROC; /* hardwire min. number of eq/proc */
    if( targ_npe == 0 || neq < TOP_GRID_LIM ) new_npe = 1; /* chop coarsest grid */
    else if (targ_npe >= nactive_procs ) new_npe = nactive_procs; /* no change */
    else {
      PetscMPIInt     factstart,fact;
      new_npe = -9999; /* sentinel: asserted below to be overwritten */
      factstart = nactive_procs;
      for(fact=factstart;fact>0;fact--){ /* try to find a better number of procs */
        if( nactive_procs%fact==0 && neq/(nactive_procs/fact) > MIN_EQ_PROC ) {
          new_npe = nactive_procs/fact;
        }
      }
      assert(new_npe != -9999);
    }
    *a_active_proc = new_npe; /* output for next time */
PetscPrintf(PETSC_COMM_WORLD,"\t\t%s num active procs = %d (N loc = %d)\n",__FUNCT__,new_npe,ncrs0);
    { /* partition: get 'isnewproc' */
      MatPartitioning  mpart;
      Mat              adj;
      const PetscInt  *is_idx;
      PetscInt         is_sz,kk,jj,old_fact=(npe/nactive_procs),new_fact=(npe/new_npe),*isnewproc_idx;
      /* create sub communicator: ranks with membershipKey==0 hold the data and
         run the partitioner; the others participate only in collectives */
      MPI_Comm cm,new_comm;
      int membershipKey = mype % old_fact;
      ierr = MPI_Comm_split(wcomm, membershipKey, mype, &cm); CHKERRQ(ierr);
      ierr = PetscCommDuplicate( cm, &new_comm, PETSC_NULL ); CHKERRQ(ierr);
      ierr = MPI_Comm_free( &cm );                            CHKERRQ(ierr);

      /* MatPartitioningApply call MatConvert, which is collective */
      ierr = MatConvert(Amat,MATMPIADJ,MAT_INITIAL_MATRIX,&adj);CHKERRQ(ierr);
      if( membershipKey == 0 ) {
        /* hack to fix global data that pmetis.c uses in 'adj': compress the
           ownership ranges down to the active (every old_fact-th) ranks */
        for( kk=0 , jj=0 ; kk<=npe ; jj++, kk += old_fact ) {
          adj->rmap->range[jj] = adj->rmap->range[kk];
        }
        ierr = MatPartitioningCreate( new_comm, &mpart ); CHKERRQ(ierr);
        ierr = MatPartitioningSetAdjacency( mpart, adj ); CHKERRQ(ierr);
        ierr = MatPartitioningSetFromOptions( mpart ); CHKERRQ(ierr);
        ierr = MatPartitioningSetNParts( mpart, new_npe ); CHKERRQ(ierr);
        ierr = MatPartitioningApply( mpart, &isnewproc ); CHKERRQ(ierr);
        ierr = MatPartitioningDestroy( &mpart ); CHKERRQ(ierr);
        /* collect IS info */
        ierr = ISGetLocalSize( isnewproc, &is_sz );        CHKERRQ(ierr);
        ierr = PetscMalloc( is_sz*sizeof(PetscInt), &isnewproc_idx ); CHKERRQ(ierr);
        ierr = ISGetIndices( isnewproc, &is_idx );     CHKERRQ(ierr);
        for(kk=0;kk<is_sz;kk++){
          /* spread partitioning across machine - probably the right thing to do but machine specific */
          isnewproc_idx[kk] = is_idx[kk] * new_fact;
        }
        ierr = ISRestoreIndices( isnewproc, &is_idx );     CHKERRQ(ierr);
        ierr = ISDestroy( &isnewproc );                    CHKERRQ(ierr);
      }
      else {
        /* non-root ranks contribute an empty slice to the collective ISCreateGeneral */
        isnewproc_idx = 0;
        is_sz = 0;
      }
      ierr = MatDestroy( &adj );                       CHKERRQ(ierr);
      ierr = MPI_Comm_free( &new_comm );    CHKERRQ(ierr);
      ierr = ISCreateGeneral( wcomm, is_sz, isnewproc_idx, PETSC_COPY_VALUES, &isnewproc ); /* NOTE(review): return code not passed to CHKERRQ -- confirm intentional */
      if( membershipKey == 0 ) {
        ierr = PetscFree( isnewproc_idx );  CHKERRQ(ierr);
      }
    }

    /*
     Create an index set from the isnewproc index set to indicate the mapping TO
     */
    ierr = ISPartitioningToNumbering( isnewproc, &isnum ); CHKERRQ(ierr);
    /*
     Determine how many elements are assigned to each processor
     */
    ierr = ISPartitioningCount( isnewproc, npe, counts ); CHKERRQ(ierr);
    ierr = ISDestroy( &isnewproc );                       CHKERRQ(ierr);
    ncrs_new = counts[mype]; /* number of local coarse vertices after repartitioning */
  }

  { /* Create a vector to contain the newly ordered element information */
    const PetscInt *idx;
    PetscInt        i,j,k;
    IS              isscat;
    PetscScalar    *array;
    Vec             src_crd, dest_crd;
    PetscReal      *coords = *a_coarse_crds;
    VecScatter      vecscat;

    ierr = VecCreate( wcomm, &dest_crd ); /* NOTE(review): return code not passed to CHKERRQ */
    ierr = VecSetSizes( dest_crd, a_dim*ncrs_new, PETSC_DECIDE ); CHKERRQ(ierr);
    ierr = VecSetFromOptions( dest_crd ); CHKERRQ(ierr); /*funny vector-get global options?*/
    /*
     There are a_dim data items per vertex, the real coordinate values
     (stored in a PetscScalar Vec so they can be moved with VecScatter;
     one can think of the vectors as having a block size of a_dim, with one
     index in idx[] per vertex)
     */
    {
      PetscInt tidx[ncrs0*a_dim]; /* NOTE(review): VLA -- size is data-dependent; watch stack usage on large local grids */
      ierr = ISGetIndices( isnum, &idx ); CHKERRQ(ierr);
      for (i=0,j=0; i<ncrs0; i++) {
        for (k=0; k<a_dim; k++) tidx[j++] = idx[i]*a_dim + k; /* expand vertex numbering to per-coordinate numbering */
      }
      ierr = ISCreateGeneral( wcomm, a_dim*ncrs0, tidx, PETSC_COPY_VALUES, &isscat );
      CHKERRQ(ierr);
      ierr = ISRestoreIndices( isnum, &idx ); CHKERRQ(ierr);
    }
    /*
     Create a vector to contain the original vertex information for each element
     */
    ierr = VecCreateSeq( PETSC_COMM_SELF, a_dim*ncrs0, &src_crd ); CHKERRQ(ierr);
    ierr = VecGetArray( src_crd, &array );                        CHKERRQ(ierr);
    for (i=0; i<a_dim*ncrs0; i++) array[i] = coords[i];
    ierr = VecRestoreArray( src_crd, &array );                     CHKERRQ(ierr);
    /*
     Scatter the element vertex information (still in the original vertex ordering)
     to the correct processor
     */
    ierr = VecScatterCreate( src_crd, PETSC_NULL, dest_crd, isscat, &vecscat);
    CHKERRQ(ierr);
    ierr = ISDestroy( &isscat );       CHKERRQ(ierr);
    ierr = VecScatterBegin(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterDestroy( &vecscat );       CHKERRQ(ierr);
    ierr = VecDestroy( &src_crd );       CHKERRQ(ierr);
    /*
     Put the scattered coordinate data into a new allocation of a_coarse_crds
     (the old allocation, sized for the pre-repartition distribution, is freed)
     */
    ierr = PetscFree( *a_coarse_crds );    CHKERRQ(ierr);
    ierr = PetscMalloc( a_dim*ncrs_new*sizeof(PetscReal), a_coarse_crds );    CHKERRQ(ierr);
    coords = *a_coarse_crds; /* convient */
    ierr = VecGetArray( dest_crd, &array );    CHKERRQ(ierr);
    for (i=0; i<a_dim*ncrs_new; i++) coords[i] = PetscRealPart(array[i]);
    ierr = VecRestoreArray( dest_crd, &array );    CHKERRQ(ierr);
    ierr = VecDestroy( &dest_crd );    CHKERRQ(ierr);
  }
  /*
   Invert for MatGetSubMatrix
   */
  ierr = ISInvertPermutation( isnum, ncrs_new, &new_indices ); CHKERRQ(ierr);
  ierr = ISSort( new_indices ); CHKERRQ(ierr); /* is this needed? */
  ierr = ISDestroy( &isnum ); CHKERRQ(ierr);
  /* A_crs output: extract the repartitioned coarse operator */
  ierr = MatGetSubMatrix( Amat, new_indices, new_indices, MAT_INITIAL_MATRIX, a_Amat_crs );
  CHKERRQ(ierr);
  ierr = MatDestroy( &Amat ); CHKERRQ(ierr);
  Amat = *a_Amat_crs;
  /* prolongator: keep rows (fine grid) in place, permute columns to the new coarse ordering */
  ierr = MatGetOwnershipRange( Pold, &Istart, &Iend );    CHKERRQ(ierr);
  {
    IS findices;
    ierr = ISCreateStride(wcomm,Iend-Istart,Istart,1,&findices);   CHKERRQ(ierr);
    ierr = MatGetSubMatrix( Pold, findices, new_indices, MAT_INITIAL_MATRIX, &Pnew );
    CHKERRQ(ierr);
    ierr = ISDestroy( &findices ); CHKERRQ(ierr);
  }
  ierr = MatDestroy( a_P_inout ); CHKERRQ(ierr);
  *a_P_inout = Pnew; /* output */

  ierr = ISDestroy( &new_indices ); CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
281 
#define GAMG_MAXLEVELS 20
#if defined(PETSC_USE_LOG)
/* bug fix: the stage array size duplicated the literal 20 instead of using
   GAMG_MAXLEVELS -- they would silently diverge if the limit were changed */
PetscLogStage  gamg_stages[GAMG_MAXLEVELS];
#endif
286 /* -------------------------------------------------------------------------- */
287 /*
288    PCSetUp_GAMG - Prepares for the use of the GAMG preconditioner
289                     by setting data structures and options.
290 
291    Input Parameter:
292 .  pc - the preconditioner context
293 
294    Application Interface Routine: PCSetUp()
295 
296    Notes:
297    The interface routine PCSetUp() is not usually called directly by
298    the user, but instead is called by PCApply() if necessary.
299 */
#undef __FUNCT__
#define __FUNCT__ "PCSetUp_GAMG"
/* Builds the multigrid hierarchy: repeatedly creates a prolongator from the
   current level's matrix and coordinates, Galerkin-coarsens and repartitions
   (partitionLevel), then configures Chebychev/Jacobi smoothers with eigenvalue
   estimates, and finally hands the hierarchy to PCSetUp_MG. */
PetscErrorCode PCSetUp_GAMG( PC pc )
{
  PetscErrorCode  ierr;
  PC_MG           *mg = (PC_MG*)pc->data;
  PC_GAMG         *pc_gamg = (PC_GAMG*)mg->innerctx;
  Mat              Amat = pc->mat, Pmat = pc->pmat;
  PetscBool        isSeq, isMPI;
  PetscInt         fine_level, level, level1, M, N, bs, lidx;
  MPI_Comm         wcomm = ((PetscObject)pc)->comm;
  PetscMPIInt      mype,npe,nactivepe;
  PetscBool isOK;

  /* Aarr/Parr are indexed fine-to-coarse during setup (index 0 = finest);
     PCMG levels are numbered coarse-to-fine, hence the lidx reversal below */
  Mat Aarr[GAMG_MAXLEVELS], Parr[GAMG_MAXLEVELS];  PetscReal *coarse_crds = 0, *crds = pc_gamg->m_data;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(wcomm,&mype);CHKERRQ(ierr);
  ierr = MPI_Comm_size(wcomm,&npe);CHKERRQ(ierr);
  if (pc->setupcalled){
    /* no state data in GAMG to destroy (now) */
    ierr = PCReset_MG(pc); CHKERRQ(ierr);
  }
  if (!pc_gamg->m_data) SETERRQ(wcomm,PETSC_ERR_SUP,"PCSetUp_GAMG called before PCSetCoordinates");
  /* setup special features of PCGAMG */
  ierr = PetscTypeCompare((PetscObject)Amat, MATSEQAIJ, &isSeq);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)Amat, MATMPIAIJ, &isMPI);CHKERRQ(ierr);
  if (isMPI) {
  } else if (isSeq) {
  } else SETERRQ1(wcomm,PETSC_ERR_ARG_WRONG, "Matrix type '%s' cannot be used with GAMG. GAMG can only handle AIJ matrices.",((PetscObject)Amat)->type_name);

  /* GAMG requires input of fine-grid matrix. It determines nlevels. */
  ierr = MatGetBlockSize( Amat, &bs ); CHKERRQ(ierr);
  if(bs!=1) SETERRQ1(wcomm,PETSC_ERR_ARG_WRONG, "GAMG only supports scalar prblems bs = '%d'.",bs);

  /* Get A_i and R_i: coarsen until the grid is small, the proc count drops to 1, or the level cap is hit */
  ierr = MatGetSize( Amat, &M, &N );CHKERRQ(ierr);
PetscPrintf(PETSC_COMM_WORLD,"[%d]%s level %d N=%d\n",0,__FUNCT__,0,N);
  for (level=0, Aarr[0] = Pmat, nactivepe = npe; /* hard wired stopping logic */
       level < GAMG_MAXLEVELS-1 && (level==0 || M>TOP_GRID_LIM) && (npe==1 || nactivepe>1);
       level++ ) {
    level1 = level + 1;
    ierr = PetscLogEventBegin(gamg_setup_stages[SET1],0,0,0,0);CHKERRQ(ierr);
    ierr = createProlongation( Aarr[level], crds, pc_gamg->m_dim,
                               &Parr[level1], &coarse_crds, &isOK );
    CHKERRQ(ierr);
    ierr = PetscLogEventEnd(gamg_setup_stages[SET1],0,0,0,0);CHKERRQ(ierr);

    ierr = PetscFree( crds ); CHKERRQ( ierr ); /* frees pc_gamg->m_data on the first pass */
    if(level==0) Aarr[0] = Amat; /* use Pmat for finest level setup, but use mat for solver */
    if( isOK ) {
      ierr = PetscLogEventBegin(gamg_setup_stages[SET2],0,0,0,0);CHKERRQ(ierr);
      ierr = partitionLevel( Aarr[level], pc_gamg->m_dim, &Parr[level1], &coarse_crds, &nactivepe, &Aarr[level1] );
      CHKERRQ(ierr);
      ierr = PetscLogEventEnd(gamg_setup_stages[SET2],0,0,0,0);CHKERRQ(ierr);
      ierr = MatGetSize( Aarr[level1], &M, &N );CHKERRQ(ierr);
PetscPrintf(PETSC_COMM_WORLD,"[%d]%s done level %d N=%d, active pe = %d\n",0,__FUNCT__,level1,N,nactivepe);
    }
    else{
      /* coarsening failed/stopped -- Parr[level1] was not (usefully) produced */
      break;
    }
    crds = coarse_crds;
  }
  ierr = PetscFree( coarse_crds ); CHKERRQ( ierr );
PetscPrintf(PETSC_COMM_WORLD,"\t[%d]%s %d levels\n",0,__FUNCT__,level + 1);
  pc_gamg->m_data = 0; /* destroyed coordinate data */
  pc_gamg->m_Nlevels = level + 1;
  fine_level = level;
  ierr = PCMGSetLevels(pc,pc_gamg->m_Nlevels,PETSC_NULL);CHKERRQ(ierr);

  /* set default smoothers: Chebychev/Jacobi on every level except the coarsest */
  for (level=1,lidx=pc_gamg->m_Nlevels-2;
       level <= fine_level;
       level++,lidx--) {
    PetscReal emax, emin;
    KSP smoother; PC subpc;
    ierr = PCMGGetSmoother( pc, level, &smoother ); CHKERRQ(ierr);
    ierr = KSPSetType( smoother, KSPCHEBYCHEV );CHKERRQ(ierr);
    { /* eigen estimate 'emax': a few Jacobi-preconditioned GMRES iterations on a random RHS */
      KSP eksp; Mat Lmat = Aarr[lidx];
      Vec bb, xx; PC pc; /* NOTE(review): shadows the outer 'pc' parameter -- confirm intentional */
      PetscInt N1, N0, tt;
      ierr = MatGetVecs( Lmat, &bb, 0 );         CHKERRQ(ierr);
      ierr = MatGetVecs( Lmat, &xx, 0 );         CHKERRQ(ierr);
      {
        PetscRandom    rctx;
        ierr = PetscRandomCreate(wcomm,&rctx);CHKERRQ(ierr);
        ierr = PetscRandomSetFromOptions(rctx);CHKERRQ(ierr);
        ierr = VecSetRandom(bb,rctx);CHKERRQ(ierr);
        ierr = PetscRandomDestroy( &rctx ); CHKERRQ(ierr);
      }
      ierr = KSPCreate(wcomm,&eksp);CHKERRQ(ierr);
      ierr = KSPSetInitialGuessNonzero( eksp, PETSC_FALSE ); CHKERRQ(ierr);
      ierr = KSPSetOperators( eksp, Lmat, Lmat, DIFFERENT_NONZERO_PATTERN ); CHKERRQ( ierr );
      ierr = KSPGetPC( eksp, &pc );CHKERRQ( ierr );
      ierr = PCSetType( pc, PCJACOBI ); CHKERRQ(ierr); /* should be same as above */
      ierr = KSPSetTolerances( eksp, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT, 5 );
      CHKERRQ(ierr);
      ierr = KSPSetConvergenceTest( eksp, KSPSkipConverged, 0, 0 ); CHKERRQ(ierr);
      ierr = KSPSetNormType( eksp, KSP_NORM_NONE );                 CHKERRQ(ierr);

      ierr = KSPSetComputeSingularValues( eksp,PETSC_TRUE ); CHKERRQ(ierr);
      ierr = KSPSolve( eksp, bb, xx ); CHKERRQ(ierr);
      ierr = KSPComputeExtremeSingularValues( eksp, &emax, &emin ); CHKERRQ(ierr);
      ierr = MatGetSize( Lmat, &N1, &tt );         CHKERRQ(ierr);
      ierr = MatGetSize( Aarr[lidx+1], &N0, &tt );CHKERRQ(ierr);
PetscPrintf(PETSC_COMM_WORLD,"\t\t%s max eigen = %e (N=%d)\n",__FUNCT__,emax,N1);
      emax *= 1.05; /* safety margin on the largest-eigenvalue estimate */

      ierr = VecDestroy( &xx );       CHKERRQ(ierr);
      ierr = VecDestroy( &bb );       CHKERRQ(ierr);
      ierr = KSPDestroy( &eksp );       CHKERRQ(ierr);

      emin = emax/((PetscReal)N1/(PetscReal)N0); /* this should be about the coarsening rate */
      ierr = KSPSetOperators( smoother, Lmat, Lmat, DIFFERENT_NONZERO_PATTERN ); /* NOTE(review): missing CHKERRQ(ierr) -- confirm */
    }
    ierr = KSPChebychevSetEigenvalues( smoother, emax, emin );CHKERRQ(ierr);
    ierr = KSPGetPC( smoother, &subpc ); CHKERRQ(ierr);
    ierr = PCSetType( subpc, PCJACOBI ); CHKERRQ(ierr);
    ierr = KSPSetNormType( smoother, KSP_NORM_NONE ); CHKERRQ(ierr);
  }
  {
    KSP smoother; /* coarse grid */
    Mat Lmat = Aarr[pc_gamg->m_Nlevels-1];
    ierr = PCMGGetSmoother( pc, 0, &smoother ); CHKERRQ(ierr);
    ierr = KSPSetOperators( smoother, Lmat, Lmat, DIFFERENT_NONZERO_PATTERN );
    CHKERRQ(ierr);
    ierr = KSPSetNormType( smoother, KSP_NORM_NONE ); CHKERRQ(ierr);
  }
  /* should be called in PCSetFromOptions_GAMG(), but cannot be called prior to PCMGSetLevels() */
  ierr = PCSetFromOptions_MG(pc); CHKERRQ(ierr);
  {
    PetscBool galerkin;
    ierr = PCMGGetGalerkin( pc,  &galerkin); CHKERRQ(ierr);
    if(galerkin){
      SETERRQ(wcomm,PETSC_ERR_ARG_WRONG, "GAMG does galerkin manually so it must not be used in PC_MG.");
    }
  }

  /* set interpolation between the levels, create timer stages, clean up */
  if( PETSC_FALSE ) { /* NOTE(review): dead debugging scaffold, never executes */
    char str[32];
    sprintf(str,"MG Level %d (%d)",0,pc_gamg->m_Nlevels-1);
    PetscLogStageRegister(str, &gamg_stages[fine_level]);
  }
  for (level=0,lidx=pc_gamg->m_Nlevels-1;
       level<fine_level;
       level++, lidx--){
    level1 = level + 1;
    ierr = PCMGSetInterpolation( pc, level1, Parr[lidx] );CHKERRQ(ierr);
    if( !PETSC_TRUE ) { /* NOTE(review): dead debugging scaffold, never executes */
      PetscViewer viewer; char fname[32];
      sprintf(fname,"Amat_%d.m",lidx);
      ierr = PetscViewerASCIIOpen( wcomm, fname, &viewer );  CHKERRQ(ierr);
      ierr = PetscViewerSetFormat( viewer, PETSC_VIEWER_ASCII_MATLAB);  CHKERRQ(ierr);
      ierr = MatView( Aarr[lidx], viewer ); CHKERRQ(ierr);
      ierr = PetscViewerDestroy( &viewer );
    }
    ierr = MatDestroy( &Parr[lidx] );  CHKERRQ(ierr); /* PCMG holds its own reference now */
    ierr = MatDestroy( &Aarr[lidx] );  CHKERRQ(ierr);
    if( PETSC_FALSE ) { /* NOTE(review): dead debugging scaffold, never executes */
      char str[32];
      sprintf(str,"MG Level %d (%d)",level+1,lidx-1);
      PetscLogStageRegister(str, &gamg_stages[lidx-1]);
    }
  }

  /* setupcalled is set to 0 so that MG is setup from scratch */
  pc->setupcalled = 0;
  ierr = PCSetUp_MG(pc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
472 
473 /* -------------------------------------------------------------------------- */
474 /*
475    PCDestroy_GAMG - Destroys the private context for the GAMG preconditioner
476    that was created with PCCreate_GAMG().
477 
478    Input Parameter:
479 .  pc - the preconditioner context
480 
481    Application Interface Routine: PCDestroy()
482 */
483 #undef __FUNCT__
484 #define __FUNCT__ "PCDestroy_GAMG"
485 PetscErrorCode PCDestroy_GAMG(PC pc)
486 {
487   PetscErrorCode  ierr;
488   PC_MG           *mg = (PC_MG*)pc->data;
489   PC_GAMG         *pc_gamg= (PC_GAMG*)mg->innerctx;
490 
491   PetscFunctionBegin;
492   ierr = PCReset_GAMG(pc);CHKERRQ(ierr);
493   ierr = PetscFree(pc_gamg);CHKERRQ(ierr);
494   ierr = PCDestroy_MG(pc);CHKERRQ(ierr);
495   PetscFunctionReturn(0);
496 }
497 
498 #undef __FUNCT__
499 #define __FUNCT__ "PCSetFromOptions_GAMG"
500 PetscErrorCode PCSetFromOptions_GAMG(PC pc)
501 {
502   /* PetscErrorCode  ierr; */
503   /* PC_MG           *mg = (PC_MG*)pc->data; */
504   /* PC_GAMG         *pc_gamg = (PC_GAMG*)mg->innerctx; */
505   /* MPI_Comm        comm = ((PetscObject)pc)->comm; */
506 
507   PetscFunctionBegin;
508   PetscFunctionReturn(0);
509 }
510 
511 /* -------------------------------------------------------------------------- */
512 /*
513  PCCreate_GAMG - Creates a GAMG preconditioner context, PC_GAMG
514 
515    Input Parameter:
516 .  pc - the preconditioner context
517 
518    Application Interface Routine: PCCreate()
519 
520   */
521  /* MC
522      PCGAMG - Use algebraic multigrid preconditioning. This preconditioner requires you provide
523        fine grid discretization matrix and coordinates on the fine grid.
524 
525    Options Database Key:
526    Multigrid options(inherited)
527 +  -pc_mg_cycles <1>: 1 for V cycle, 2 for W-cycle (MGSetCycles)
528 .  -pc_mg_smoothup <1>: Number of post-smoothing steps (MGSetNumberSmoothUp)
529 .  -pc_mg_smoothdown <1>: Number of pre-smoothing steps (MGSetNumberSmoothDown)
530    -pc_mg_type <multiplicative>: (one of) additive multiplicative full cascade kascade
531    GAMG options:
532 
533    Level: intermediate
534   Concepts: multigrid
535 
.seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC, PCMGType,
           PCMGSetLevels(), PCMGGetLevels(), PCMGSetType(), PCMGSetCycles(), PCMGSetNumberSmoothDown(),
           PCMGSetNumberSmoothUp(), PCMGGetCoarseSolve(), PCMGSetResidual(), PCMGSetInterpolation(),
           PCMGSetRestriction(), PCMGGetSmoother(), PCMGGetSmootherUp(), PCMGGetSmootherDown(),
           PCMGSetCyclesOnLevel(), PCMGSetRhs(), PCMGSetX(), PCMGSetR()
541 M */
542 
543 EXTERN_C_BEGIN
544 #undef __FUNCT__
545 #define __FUNCT__ "PCCreate_GAMG"
546 PetscErrorCode  PCCreate_GAMG(PC pc)
547 {
548   PetscErrorCode  ierr;
549   PC_GAMG         *pc_gamg;
550   PC_MG           *mg;
551   PetscClassId     cookie;
552 
553   PetscFunctionBegin;
554   /* PCGAMG is an inherited class of PCMG. Initialize pc as PCMG */
555   ierr = PCSetType(pc,PCMG);CHKERRQ(ierr); /* calls PCCreate_MG() and MGCreate_Private() */
556   ierr = PetscObjectChangeTypeName((PetscObject)pc,PCGAMG);CHKERRQ(ierr);
557 
558   /* create a supporting struct and attach it to pc */
559   ierr = PetscNewLog(pc,PC_GAMG,&pc_gamg);CHKERRQ(ierr);
560   mg = (PC_MG*)pc->data;
561   mg->innerctx = pc_gamg;
562 
563   pc_gamg->m_Nlevels    = -1;
564 
565   /* overwrite the pointers of PCMG by the functions of PCGAMG */
566   pc->ops->setfromoptions = PCSetFromOptions_GAMG;
567   pc->ops->setup          = PCSetUp_GAMG;
568   pc->ops->reset          = PCReset_GAMG;
569   pc->ops->destroy        = PCDestroy_GAMG;
570 
571   ierr = PetscObjectComposeFunctionDynamic( (PetscObject)pc,
572 					    "PCSetCoordinates_C",
573 					    "PCSetCoordinates_GAMG",
574 					    PCSetCoordinates_GAMG);CHKERRQ(ierr);
575 
576   PetscClassIdRegister("GAMG Setup",&cookie);
577   PetscLogEventRegister("GAMG-createProl", cookie, &gamg_setup_stages[SET1]);
578   PetscLogEventRegister("GAMG-partLevel", cookie, &gamg_setup_stages[SET2]);
579   PetscLogEventRegister("GAMG-MIS Graph", cookie, &gamg_setup_stages[SET3]);
580   PetscLogEventRegister("GAMG-MIS-Agg", cookie, &gamg_setup_stages[SET4]);
581   PetscLogEventRegister("GAMG-growSupp", cookie, &gamg_setup_stages[SET5]);
582   PetscLogEventRegister("GAMG-tri-Prol", cookie, &gamg_setup_stages[SET6]);
583   PetscLogEventRegister("GAMG-find-prol", cookie, &gamg_setup_stages[FIND_V]);
584 
585   PetscFunctionReturn(0);
586 }
587 EXTERN_C_END
588