xref: /petsc/src/ksp/pc/impls/gamg/gamg.c (revision d804b0d752269eab3ddf34557cf42fbcf69e8d32)
1 /*
 GAMG geometric-algebraic multigrid PC - Mark Adams 2011
3  */
4 #include <../src/ksp/pc/impls/gamg/gamg.h>
5 
PetscLogEvent gamg_setup_stages[NUM_SET]; /* profiling events; registered in PCCreate_GAMG */
7 
/* Private context for the GAMG preconditioner; hangs off PC_MG's 'innerctx' */
typedef struct gamg_TAG {
  PetscInt       m_dim;     /* spatial dimension of the coordinate data, set in PCSetCoordinates_GAMG */
  PetscInt       m_Nlevels; /* number of multigrid levels built by PCSetUp_GAMG (-1 until set up) */
  PetscInt       m_data_sz; /* number of PetscReal entries in 'm_data' */
  PetscReal     *m_data; /* blocked vector of vertex data on fine grid (coordinates) */
} PC_GAMG;
15 
#define TOP_GRID_LIM 100 /* coarsening stops once the global matrix has at most this many equations (see PCSetUp_GAMG / partitionLevel) */
17 
18 /* -----------------------------------------------------------------------------*/
19 #undef __FUNCT__
20 #define __FUNCT__ "PCReset_GAMG"
21 PetscErrorCode PCReset_GAMG(PC pc)
22 {
23   PetscErrorCode  ierr;
24   PC_MG           *mg = (PC_MG*)pc->data;
25   PC_GAMG         *pc_gamg = (PC_GAMG*)mg->innerctx;
26 
27   PetscFunctionBegin;
28   ierr = PetscFree(pc_gamg->m_data);CHKERRQ(ierr);
29   PetscFunctionReturn(0);
30 }
31 
32 /* -------------------------------------------------------------------------- */
33 /*
34    PCSetCoordinates_GAMG
35 
   Input Parameters:
+  pc - the preconditioner context
.  ndm - spatial dimension of the coordinates (e.g., 2 or 3)
-  coords - blocked array of vertex coordinates, 'ndm' values per local block row of pc->pmat
*/
39 EXTERN_C_BEGIN
40 #undef __FUNCT__
41 #define __FUNCT__ "PCSetCoordinates_GAMG"
42 PetscErrorCode PCSetCoordinates_GAMG(PC pc, const int ndm,PetscReal *coords )
43 {
44   PC_MG          *mg = (PC_MG*)pc->data;
45   PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
46   PetscErrorCode ierr;
47   PetscInt       bs, my0, tt;
48   Mat            mat = pc->pmat;
49   PetscInt       arrsz;
50 
51   PetscFunctionBegin;
52   ierr  = MatGetBlockSize( mat, &bs );               CHKERRQ( ierr );
53   ierr  = MatGetOwnershipRange( mat, &my0, &tt ); CHKERRQ(ierr);
54   arrsz = (tt-my0)/bs*ndm;
55 
56   // put coordinates
57   if (!pc_gamg->m_data || (pc_gamg->m_data_sz != arrsz)) {
58     ierr = PetscFree( pc_gamg->m_data );  CHKERRQ(ierr);
59     ierr = PetscMalloc(arrsz*sizeof(double), &pc_gamg->m_data ); CHKERRQ(ierr);
60   }
61 
62   /* copy data in */
63   for(tt=0;tt<arrsz;tt++){
64     pc_gamg->m_data[tt] = coords[tt];
65   }
66   pc_gamg->m_data_sz = arrsz;
67   pc_gamg->m_dim = ndm;
68 
69   PetscFunctionReturn(0);
70 }
71 EXTERN_C_END
72 
73 /* -------------------------------------------------------------------------- */
74 /*
75    partitionLevel
76 
77    Input Parameter:
78    . a_Amat_fine - matrix on this fine (k) level
   . a_dim - 2 or 3
80    In/Output Parameter:
81    . a_P_inout - prolongation operator to the next level (k-1)
82    . a_coarse_crds - coordinates that need to be moved
83    . a_active_proc - number of active procs
84    Output Parameter:
85    . a_Amat_crs - coarse matrix that is created (k-1)
86 */
87 
#undef __FUNCT__
#define __FUNCT__ "partitionLevel"
PetscErrorCode partitionLevel( Mat a_Amat_fine,
                               PetscInt a_dim,
                               Mat *a_P_inout,
                               PetscReal **a_coarse_crds,
                               PetscMPIInt *a_active_proc,
                               Mat *a_Amat_crs
                            )
{
  PetscErrorCode   ierr;
  Mat              Amat, Pnew, Pold = *a_P_inout;
  IS               new_indices,isnum;
  MPI_Comm         wcomm = ((PetscObject)a_Amat_fine)->comm;
  PetscMPIInt      nactive_procs,mype,npe;
  PetscInt         Istart,Iend,Istart0,Iend0,ncrs0,ncrs_new,bs=1; /* bs hard-wired to 1: GAMG only supports scalar problems (enforced in PCSetUp_GAMG) */

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(wcomm,&mype);CHKERRQ(ierr);
  ierr = MPI_Comm_size(wcomm,&npe);CHKERRQ(ierr);
  /* RAP: form the (un-repartitioned) coarse operator Amat = P^T A P */
  ierr = MatPtAP( a_Amat_fine, Pold, MAT_INITIAL_MATRIX, 2.0, &Amat ); CHKERRQ(ierr);
  ierr = MatGetOwnershipRange( Amat, &Istart0, &Iend0 );    CHKERRQ(ierr); /* x2 size of 'a_coarse_crds' */
  ncrs0 = (Iend0 - Istart0)/bs; /* number of local coarse-grid points before repartitioning */

  /* Repartition Amat_{k} and move colums of P^{k}_{k-1} and coordinates accordingly */
  {
    PetscInt        neq,N,counts[npe]; /* VLA of size npe -- NOTE(review): large process counts put this on the stack */
    IS              isnewproc;
    PetscMPIInt     new_npe,targ_npe;

    ierr = MatGetSize( Amat, &neq, &N );CHKERRQ(ierr);
#define MIN_EQ_PROC 100
    nactive_procs = *a_active_proc;
    targ_npe = neq/MIN_EQ_PROC; /* hardwire min. number of eq/proc */
    if( targ_npe == 0 || neq < TOP_GRID_LIM ) new_npe = 1; /* chop coarsest grid */
    else if (targ_npe >= nactive_procs ) new_npe = nactive_procs; /* no change */
    else {
      /* shrink the active set: pick the smallest divisor-reduced process count
         that still keeps more than MIN_EQ_PROC equations per process */
      PetscMPIInt     factstart,fact;
      new_npe = -9999; /* sentinel: loop below must overwrite it */
      factstart = nactive_procs;
      for(fact=factstart;fact>0;fact--){ /* try to find a better number of procs */
        if( nactive_procs%fact==0 && neq/(nactive_procs/fact) > MIN_EQ_PROC ) {
          new_npe = nactive_procs/fact;
        }
      }
      assert(new_npe != -9999);
    }
    *a_active_proc = new_npe; /* output for next time */
PetscPrintf(PETSC_COMM_WORLD,"\t\t%s num active procs = %d (N loc = %d)\n",__FUNCT__,new_npe,ncrs0);
    { /* partition: get 'isnewproc' */
      MatPartitioning  mpart;
      Mat              adj;
      const PetscInt  *is_idx;
      PetscInt         is_sz,kk,jj,old_fact=(npe/nactive_procs),new_fact=(npe/new_npe),*isnewproc_idx;
      /* create sub communicator of the processes that do the partitioning
         (every old_fact-th rank has membershipKey==0) */
      MPI_Comm cm,new_comm;
      int membershipKey = mype % old_fact;
      ierr = MPI_Comm_split(wcomm, membershipKey, mype, &cm); CHKERRQ(ierr);
      ierr = PetscCommDuplicate( cm, &new_comm, PETSC_NULL ); CHKERRQ(ierr);
      ierr = MPI_Comm_free( &cm );                            CHKERRQ(ierr);

      /* MatPartitioningApply call MatConvert, which is collective */
      ierr = MatConvert(Amat,MATMPIADJ,MAT_INITIAL_MATRIX,&adj);CHKERRQ(ierr);
      if( membershipKey == 0 ) {
        /* hack to fix global data that pmetis.c uses in 'adj':
           compress the row-range table to the participating ranks only */
        for( kk=0 , jj=0 ; kk<=npe ; jj++, kk += old_fact ) {
          adj->rmap->range[jj] = adj->rmap->range[kk];
        }
        ierr = MatPartitioningCreate( new_comm, &mpart ); CHKERRQ(ierr);
        ierr = MatPartitioningSetAdjacency( mpart, adj ); CHKERRQ(ierr);
        ierr = MatPartitioningSetFromOptions( mpart ); CHKERRQ(ierr);
        ierr = MatPartitioningSetNParts( mpart, new_npe ); CHKERRQ(ierr);
        ierr = MatPartitioningApply( mpart, &isnewproc ); CHKERRQ(ierr);
        ierr = MatPartitioningDestroy( &mpart ); CHKERRQ(ierr);
        /* collect IS info */
        ierr = ISGetLocalSize( isnewproc, &is_sz );        CHKERRQ(ierr);
        ierr = PetscMalloc( is_sz*sizeof(PetscInt), &isnewproc_idx ); CHKERRQ(ierr);
        ierr = ISGetIndices( isnewproc, &is_idx );     CHKERRQ(ierr);
        for(kk=0;kk<is_sz;kk++){
          /* spread partitioning across machine - probably the right thing to do but machine specific */
          isnewproc_idx[kk] = is_idx[kk] * new_fact;
        }
        ierr = ISRestoreIndices( isnewproc, &is_idx );     CHKERRQ(ierr);
        ierr = ISDestroy( &isnewproc );                    CHKERRQ(ierr);
      }
      else {
        /* non-participating ranks contribute an empty slice to the global IS */
        isnewproc_idx = 0;
        is_sz = 0;
      }
      ierr = MatDestroy( &adj );                       CHKERRQ(ierr);
      ierr = MPI_Comm_free( &new_comm );    CHKERRQ(ierr);
      ierr = ISCreateGeneral( wcomm, is_sz, isnewproc_idx, PETSC_COPY_VALUES, &isnewproc ); /* NOTE(review): ierr not checked with CHKERRQ here */
      if( membershipKey == 0 ) {
        ierr = PetscFree( isnewproc_idx );  CHKERRQ(ierr);
      }
    }

    /*
     Create an index set from the isnewproc index set to indicate the mapping TO
     */
    ierr = ISPartitioningToNumbering( isnewproc, &isnum ); CHKERRQ(ierr);
    /*
     Determine how many elements are assigned to each processor
     */
    ierr = ISPartitioningCount( isnewproc, npe, counts ); CHKERRQ(ierr);
    ierr = ISDestroy( &isnewproc );                       CHKERRQ(ierr);
    ncrs_new = counts[mype]; /* number of local coarse-grid points after repartitioning */
  }

  { /* Create a vector to contain the newly ordered element information */
    const PetscInt *idx;
    PetscInt        i,j,k;
    IS              isscat;
    PetscScalar    *array;
    Vec             src_crd, dest_crd;
    PetscReal      *coords = *a_coarse_crds;
    VecScatter      vecscat;

    ierr = VecCreate( wcomm, &dest_crd ); /* NOTE(review): ierr not checked with CHKERRQ here */
    ierr = VecSetSizes( dest_crd, a_dim*ncrs_new, PETSC_DECIDE ); CHKERRQ(ierr);
    ierr = VecSetFromOptions( dest_crd ); CHKERRQ(ierr); /*funny vector-get global options?*/
    /*
     Build the scatter index set: a_dim coordinate values per coarse point,
     addressed by the new global numbering from 'isnum' (coordinates are real
     but travel through a PetscScalar Vec so VecScatter can move them)
     */
    {
      PetscInt tidx[ncrs0*a_dim]; /* NOTE(review): VLA on the stack, sized by local point count */
      ierr = ISGetIndices( isnum, &idx ); CHKERRQ(ierr);
      for (i=0,j=0; i<ncrs0; i++) {
        for (k=0; k<a_dim; k++) tidx[j++] = idx[i]*a_dim + k;
      }
      ierr = ISCreateGeneral( wcomm, a_dim*ncrs0, tidx, PETSC_COPY_VALUES, &isscat );
      CHKERRQ(ierr);
      ierr = ISRestoreIndices( isnum, &idx ); CHKERRQ(ierr);
    }
    /*
     Create a vector to contain the original vertex information for each element
     */
    ierr = VecCreateSeq( PETSC_COMM_SELF, a_dim*ncrs0, &src_crd ); CHKERRQ(ierr);
    ierr = VecGetArray( src_crd, &array );                        CHKERRQ(ierr);
    for (i=0; i<a_dim*ncrs0; i++) array[i] = coords[i];
    ierr = VecRestoreArray( src_crd, &array );                     CHKERRQ(ierr);
    /*
     Scatter the element vertex information (still in the original vertex ordering)
     to the correct processor
     */
    ierr = VecScatterCreate( src_crd, PETSC_NULL, dest_crd, isscat, &vecscat);
    CHKERRQ(ierr);
    ierr = ISDestroy( &isscat );       CHKERRQ(ierr);
    ierr = VecScatterBegin(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterDestroy( &vecscat );       CHKERRQ(ierr);
    ierr = VecDestroy( &src_crd );       CHKERRQ(ierr);
    /*
     Replace the caller's coordinate array with the repartitioned data
     (ownership of *a_coarse_crds stays with the caller)
     */
    ierr = PetscFree( *a_coarse_crds );    CHKERRQ(ierr);
    ierr = PetscMalloc( a_dim*ncrs_new*sizeof(PetscReal), a_coarse_crds );    CHKERRQ(ierr);
    coords = *a_coarse_crds; /* convenient */
    ierr = VecGetArray( dest_crd, &array );    CHKERRQ(ierr);
    for (i=0; i<a_dim*ncrs_new; i++) coords[i] = PetscRealPart(array[i]);
    ierr = VecRestoreArray( dest_crd, &array );    CHKERRQ(ierr);
    ierr = VecDestroy( &dest_crd );    CHKERRQ(ierr);
  }
  /*
   Invert for MatGetSubMatrix
   */
  ierr = ISInvertPermutation( isnum, ncrs_new, &new_indices ); CHKERRQ(ierr);
  ierr = ISSort( new_indices ); CHKERRQ(ierr); /* is this needed? */
  ierr = ISDestroy( &isnum ); CHKERRQ(ierr);
  /* A_crs output: extract the repartitioned coarse operator */
  ierr = MatGetSubMatrix( Amat, new_indices, new_indices, MAT_INITIAL_MATRIX, a_Amat_crs );
  CHKERRQ(ierr);
  ierr = MatDestroy( &Amat ); CHKERRQ(ierr);
  Amat = *a_Amat_crs;
  /* prolongator: permute the columns of P to the new coarse ordering */
  ierr = MatGetOwnershipRange( Pold, &Istart, &Iend );    CHKERRQ(ierr);
  {
    IS findices;
    ierr = ISCreateStride(wcomm,Iend-Istart,Istart,1,&findices);   CHKERRQ(ierr);
    ierr = MatGetSubMatrix( Pold, findices, new_indices, MAT_INITIAL_MATRIX, &Pnew );
    CHKERRQ(ierr);
    ierr = ISDestroy( &findices ); CHKERRQ(ierr);
  }
  ierr = MatDestroy( a_P_inout ); CHKERRQ(ierr);
  *a_P_inout = Pnew; /* output */

  ierr = ISDestroy( &new_indices ); CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
281 
#define GAMG_MAXLEVELS 20
#if defined(PETSC_USE_LOG)
/* per-level profiling stages, indexed by level in PCSetUp_GAMG; size must track GAMG_MAXLEVELS */
PetscLogStage  gamg_stages[GAMG_MAXLEVELS];
#endif
286 /* -------------------------------------------------------------------------- */
287 /*
288    PCSetUp_GAMG - Prepares for the use of the GAMG preconditioner
289                     by setting data structures and options.
290 
291    Input Parameter:
292 .  pc - the preconditioner context
293 
294    Application Interface Routine: PCSetUp()
295 
296    Notes:
297    The interface routine PCSetUp() is not usually called directly by
298    the user, but instead is called by PCApply() if necessary.
299 */
#undef __FUNCT__
#define __FUNCT__ "PCSetUp_GAMG"
PetscErrorCode PCSetUp_GAMG( PC pc )
{
  PetscErrorCode  ierr;
  PC_MG           *mg = (PC_MG*)pc->data;
  PC_GAMG         *pc_gamg = (PC_GAMG*)mg->innerctx;
  Mat              Amat = pc->mat, Pmat = pc->pmat;
  PetscBool        isSeq, isMPI;
  PetscInt         fine_level, level, level1, M, N, bs, lidx;
  MPI_Comm         wcomm = ((PetscObject)pc)->comm;
  PetscMPIInt      mype,npe,nactivepe;
  PetscBool isOK; /* set by createProlongation: false means coarsening stopped */

  /* Aarr/Parr hold operators and prolongators per level, finest at index 0;
     'crds' starts as the user coordinates set by PCSetCoordinates_GAMG */
  Mat Aarr[GAMG_MAXLEVELS], Parr[GAMG_MAXLEVELS];  PetscReal *coarse_crds = 0, *crds = pc_gamg->m_data;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(wcomm,&mype);CHKERRQ(ierr);
  ierr = MPI_Comm_size(wcomm,&npe);CHKERRQ(ierr);
  if (pc->setupcalled){
    /* no state data in GAMG to destroy (now) */
    ierr = PCReset_MG(pc); CHKERRQ(ierr);
  }
  if (!pc_gamg->m_data) SETERRQ(wcomm,PETSC_ERR_SUP,"PCSetUp_GAMG called before PCSetCoordinates");
  /* setup special features of PCGAMG: only (seq or MPI) AIJ matrices are supported */
  ierr = PetscTypeCompare((PetscObject)Amat, MATSEQAIJ, &isSeq);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)Amat, MATMPIAIJ, &isMPI);CHKERRQ(ierr);
  if (isMPI) {
  } else if (isSeq) {
  } else SETERRQ1(wcomm,PETSC_ERR_ARG_WRONG, "Matrix type '%s' cannot be used with GAMG. GAMG can only handle AIJ matrices.",((PetscObject)Amat)->type_name);

  /* GAMG requires input of fine-grid matrix. It determines nlevels. */
  ierr = MatGetBlockSize( Amat, &bs ); CHKERRQ(ierr);
  if(bs!=1) SETERRQ1(wcomm,PETSC_ERR_ARG_WRONG, "GAMG only supports scalar prblems bs = '%d'.",bs);

  /* Get A_i and R_i: coarsen until the grid is small (TOP_GRID_LIM) or we hit GAMG_MAXLEVELS */
  ierr = MatGetSize( Amat, &M, &N );CHKERRQ(ierr);
PetscPrintf(PETSC_COMM_WORLD,"[%d]%s level %d N=%d\n",0,__FUNCT__,0,N);
  for (level=0, Aarr[0] = Pmat, nactivepe = npe;
       level < GAMG_MAXLEVELS-1 && (level==0 || M>TOP_GRID_LIM); /* hard wired stopping logic */
       level++ ) {
    level1 = level + 1;
    ierr = PetscLogEventBegin(gamg_setup_stages[SET1],0,0,0,0);CHKERRQ(ierr);
    ierr = createProlongation( Aarr[level], crds, pc_gamg->m_dim,
                               &Parr[level1], &coarse_crds, &isOK );
    CHKERRQ(ierr);
    ierr = PetscLogEventEnd(gamg_setup_stages[SET1],0,0,0,0);CHKERRQ(ierr);

    /* on level 0 this frees pc_gamg->m_data (pointer is cleared below the loop) */
    ierr = PetscFree( crds ); CHKERRQ( ierr );
    if(level==0) Aarr[0] = Amat; /* use Pmat for finest level setup, but use mat for solver */
    if( isOK ) {
      ierr = PetscLogEventBegin(gamg_setup_stages[SET2],0,0,0,0);CHKERRQ(ierr);
      ierr = partitionLevel( Aarr[level], pc_gamg->m_dim, &Parr[level1], &coarse_crds, &nactivepe, &Aarr[level1] );
      CHKERRQ(ierr);
      ierr = PetscLogEventEnd(gamg_setup_stages[SET2],0,0,0,0);CHKERRQ(ierr);
      ierr = MatGetSize( Aarr[level1], &M, &N );CHKERRQ(ierr);
PetscPrintf(PETSC_COMM_WORLD,"[%d]%s done level %d N=%d, active pe = %d\n",0,__FUNCT__,level1,N,nactivepe);
    }
    else{
      /* coarsening failed/stopped: keep the levels built so far */
      break;
    }
    crds = coarse_crds; /* coarse coordinates feed the next coarsening pass */
  }
  ierr = PetscFree( coarse_crds ); CHKERRQ( ierr );
PetscPrintf(PETSC_COMM_WORLD,"\t[%d]%s %d levels\n",0,__FUNCT__,level + 1);
  pc_gamg->m_data = 0; /* destroyed coordinate data */
  pc_gamg->m_Nlevels = level + 1;
  fine_level = level;
  ierr = PCMGSetLevels(pc,pc_gamg->m_Nlevels,PETSC_NULL);CHKERRQ(ierr);

  /* set default smoothers: Chebychev/Jacobi on every level above the coarsest.
     PCMG level numbering is reversed w.r.t. Aarr: level==fine_level maps to lidx==0 */
  for (level=1,lidx=pc_gamg->m_Nlevels-2;
       level<=fine_level;
       level++,lidx--) {
    PetscReal emax = 3.0, emin;
    KSP smoother; PC subpc;
    ierr = PCMGGetSmoother(pc,level,&smoother);CHKERRQ(ierr);
    ierr = KSPSetType(smoother,KSPCHEBYCHEV);CHKERRQ(ierr);
    ierr = KSPGetPC(smoother,&subpc);CHKERRQ(ierr);
    ierr = PCSetType(subpc,PCJACOBI);CHKERRQ(ierr);
    { /* eigen estimate 'emax': a few Jacobi-preconditioned GMRES-default iterations
         with a random rhs (note: 'Amat' and 'pc' shadow the outer variables here) */
      KSP eksp; Mat Amat = Aarr[lidx];
      Vec bb, xx; PC pc;
      PetscInt N1, N0, tt;
      ierr = MatGetVecs( Amat, &bb, 0 );         CHKERRQ(ierr);
      ierr = MatGetVecs( Amat, &xx, 0 );         CHKERRQ(ierr);
      {
	PetscRandom    rctx;
	ierr = PetscRandomCreate(wcomm,&rctx);CHKERRQ(ierr);
	ierr = PetscRandomSetFromOptions(rctx);CHKERRQ(ierr);
	ierr = VecSetRandom(bb,rctx);CHKERRQ(ierr);
	ierr = PetscRandomDestroy( &rctx ); CHKERRQ(ierr);
      }
      ierr = KSPCreate(wcomm,&eksp);CHKERRQ(ierr);
      ierr = KSPSetInitialGuessNonzero( eksp, PETSC_FALSE ); CHKERRQ(ierr);
      ierr = KSPSetOperators( eksp, Amat, Amat, DIFFERENT_NONZERO_PATTERN ); CHKERRQ( ierr );
      ierr = KSPGetPC( eksp, &pc );CHKERRQ( ierr );
      ierr = PCSetType( pc, PCJACOBI ); CHKERRQ(ierr); /* should be same as above */
      ierr = KSPSetTolerances( eksp, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT, 5 );
      CHKERRQ(ierr);
      ierr = KSPSetConvergenceTest( eksp, KSPSkipConverged, 0, 0 ); CHKERRQ(ierr);
      ierr = KSPSetNormType( eksp, KSP_NORM_NONE );                 CHKERRQ(ierr);

      ierr = KSPSetComputeSingularValues( eksp,PETSC_TRUE ); CHKERRQ(ierr);
      ierr = KSPSolve(eksp,bb,xx); CHKERRQ(ierr);
      ierr = KSPComputeExtremeSingularValues( eksp, &emax, &emin ); CHKERRQ(ierr);

      ierr = VecDestroy( &xx );       CHKERRQ(ierr);
      ierr = VecDestroy( &bb );       CHKERRQ(ierr);
      ierr = KSPDestroy( &eksp );       CHKERRQ(ierr);

      ierr = MatGetSize( Amat, &N1, &tt );         CHKERRQ(ierr);
      ierr = MatGetSize( Aarr[lidx+1], &N0, &tt );CHKERRQ(ierr);
      emax *= 1.05; /* safety margin on the largest-eigenvalue estimate */
      emin = emax/((PetscReal)N1/(PetscReal)N0); /* this should be about the coarsening rate */
    }
    ierr = KSPChebychevSetEigenvalues( smoother, emax, emin );CHKERRQ(ierr);
  }
  /* should be called in PCSetFromOptions_GAMG(), but cannot be called prior to PCMGSetLevels() */
  ierr = PCSetFromOptions_MG(pc); CHKERRQ(ierr);
  {
    PetscBool galerkin;
    ierr = PCMGGetGalerkin( pc,  &galerkin); CHKERRQ(ierr);
    if(galerkin){
      SETERRQ(wcomm,PETSC_ERR_ARG_WRONG, "GAMG does galerkin manually so it must not be used in PC_MG.");
    }
  }
  {
    char str[32]; /* NOTE(review): fixed-size buffer for sprintf below */
    sprintf(str,"MG Level %d (%d)",0,pc_gamg->m_Nlevels-1);
    PetscLogStageRegister(str, &gamg_stages[fine_level]);
  }
  /* create coarse level and the interpolation between the levels */
  for (level=0,lidx=pc_gamg->m_Nlevels-1;
       level<fine_level;
       level++,lidx--){
    level1 = level + 1;
    ierr = PCMGSetInterpolation(pc,level1,Parr[lidx]);CHKERRQ(ierr);
    if( !PETSC_TRUE ) { /* dead debug branch: dump level matrices in MATLAB format */
      PetscViewer viewer; char fname[32];
      sprintf(fname,"Amat_%d.m",lidx);
      ierr = PetscViewerASCIIOpen( wcomm, fname, &viewer );  CHKERRQ(ierr);
      ierr = PetscViewerSetFormat( viewer, PETSC_VIEWER_ASCII_MATLAB);  CHKERRQ(ierr);
      ierr = MatView( Aarr[lidx], viewer ); CHKERRQ(ierr);
      ierr = PetscViewerDestroy( &viewer ); /* NOTE(review): ierr not checked */
    }
    ierr = MatDestroy( &Parr[lidx] );  CHKERRQ(ierr); /* PCMG holds its own reference now */
    {
      KSP smoother;
      ierr = PCMGGetSmoother(pc,level,&smoother); CHKERRQ(ierr);
      ierr = KSPSetOperators( smoother, Aarr[lidx], Aarr[lidx], DIFFERENT_NONZERO_PATTERN );
      CHKERRQ(ierr);
      ierr = KSPSetNormType( smoother, KSP_NORM_NONE ); CHKERRQ(ierr);
    }
    ierr = MatDestroy( &Aarr[lidx] );  CHKERRQ(ierr);
    {
      char str[32];
      sprintf(str,"MG Level %d (%d)",level+1,lidx-1);
      PetscLogStageRegister(str, &gamg_stages[lidx-1]);
    }
  }
  { /* fine level (no P) */
    KSP smoother;
    ierr = PCMGGetSmoother(pc,fine_level,&smoother); CHKERRQ(ierr);
    ierr = KSPSetOperators( smoother, Aarr[0], Aarr[0], DIFFERENT_NONZERO_PATTERN );
    CHKERRQ(ierr);
    ierr = KSPSetNormType( smoother, KSP_NORM_NONE ); CHKERRQ(ierr);
  }
  /* setupcalled is set to 0 so that MG is setup from scratch */
  pc->setupcalled = 0;
  ierr = PCSetUp_MG(pc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
473 
474 /* -------------------------------------------------------------------------- */
475 /*
476    PCDestroy_GAMG - Destroys the private context for the GAMG preconditioner
477    that was created with PCCreate_GAMG().
478 
479    Input Parameter:
480 .  pc - the preconditioner context
481 
482    Application Interface Routine: PCDestroy()
483 */
484 #undef __FUNCT__
485 #define __FUNCT__ "PCDestroy_GAMG"
486 PetscErrorCode PCDestroy_GAMG(PC pc)
487 {
488   PetscErrorCode  ierr;
489   PC_MG           *mg = (PC_MG*)pc->data;
490   PC_GAMG         *pc_gamg= (PC_GAMG*)mg->innerctx;
491 
492   PetscFunctionBegin;
493   ierr = PCReset_GAMG(pc);CHKERRQ(ierr);
494   ierr = PetscFree(pc_gamg);CHKERRQ(ierr);
495   ierr = PCDestroy_MG(pc);CHKERRQ(ierr);
496   PetscFunctionReturn(0);
497 }
498 
499 #undef __FUNCT__
500 #define __FUNCT__ "PCSetFromOptions_GAMG"
501 PetscErrorCode PCSetFromOptions_GAMG(PC pc)
502 {
503   /* PetscErrorCode  ierr; */
504   /* PC_MG           *mg = (PC_MG*)pc->data; */
505   /* PC_GAMG         *pc_gamg = (PC_GAMG*)mg->innerctx; */
506   /* MPI_Comm        comm = ((PetscObject)pc)->comm; */
507 
508   PetscFunctionBegin;
509   PetscFunctionReturn(0);
510 }
511 
512 /* -------------------------------------------------------------------------- */
513 /*
514  PCCreate_GAMG - Creates a GAMG preconditioner context, PC_GAMG
515 
516    Input Parameter:
517 .  pc - the preconditioner context
518 
519    Application Interface Routine: PCCreate()
520 
521   */
522  /* MC
523      PCGAMG - Use algebraic multigrid preconditioning. This preconditioner requires you provide
524        fine grid discretization matrix and coordinates on the fine grid.
525 
526    Options Database Key:
527    Multigrid options(inherited)
528 +  -pc_mg_cycles <1>: 1 for V cycle, 2 for W-cycle (MGSetCycles)
529 .  -pc_mg_smoothup <1>: Number of post-smoothing steps (MGSetNumberSmoothUp)
530 .  -pc_mg_smoothdown <1>: Number of pre-smoothing steps (MGSetNumberSmoothDown)
531    -pc_mg_type <multiplicative>: (one of) additive multiplicative full cascade kascade
532    GAMG options:
533 
534    Level: intermediate
535   Concepts: multigrid
536 
537 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC, PCMGType,
           PCMGSetLevels(), PCMGGetLevels(), PCMGSetType(), PCMGSetCycles(), PCMGSetNumberSmoothDown(),
539            PCMGSetNumberSmoothUp(), PCMGGetCoarseSolve(), PCMGSetResidual(), PCMGSetInterpolation(),
540            PCMGSetRestriction(), PCMGGetSmoother(), PCMGGetSmootherUp(), PCMGGetSmootherDown(),
541            PCMGSetCyclesOnLevel(), PCMGSetRhs(), PCMGSetX(), PCMGSetR()
542 M */
543 
544 EXTERN_C_BEGIN
545 #undef __FUNCT__
546 #define __FUNCT__ "PCCreate_GAMG"
547 PetscErrorCode  PCCreate_GAMG(PC pc)
548 {
549   PetscErrorCode  ierr;
550   PC_GAMG         *pc_gamg;
551   PC_MG           *mg;
552   PetscClassId     cookie;
553 
554   PetscFunctionBegin;
555   /* PCGAMG is an inherited class of PCMG. Initialize pc as PCMG */
556   ierr = PCSetType(pc,PCMG);CHKERRQ(ierr); /* calls PCCreate_MG() and MGCreate_Private() */
557   ierr = PetscObjectChangeTypeName((PetscObject)pc,PCGAMG);CHKERRQ(ierr);
558 
559   /* create a supporting struct and attach it to pc */
560   ierr = PetscNewLog(pc,PC_GAMG,&pc_gamg);CHKERRQ(ierr);
561   mg = (PC_MG*)pc->data;
562   mg->innerctx = pc_gamg;
563 
564   pc_gamg->m_Nlevels    = -1;
565 
566   /* overwrite the pointers of PCMG by the functions of PCGAMG */
567   pc->ops->setfromoptions = PCSetFromOptions_GAMG;
568   pc->ops->setup          = PCSetUp_GAMG;
569   pc->ops->reset          = PCReset_GAMG;
570   pc->ops->destroy        = PCDestroy_GAMG;
571 
572   ierr = PetscObjectComposeFunctionDynamic( (PetscObject)pc,
573 					    "PCSetCoordinates_C",
574 					    "PCSetCoordinates_GAMG",
575 					    PCSetCoordinates_GAMG);CHKERRQ(ierr);
576 
577   PetscClassIdRegister("GAMG Setup",&cookie);
578   PetscLogEventRegister("GAMG-createProl", cookie, &gamg_setup_stages[SET1]);
579   PetscLogEventRegister("GAMG-partLevel", cookie, &gamg_setup_stages[SET2]);
580   PetscLogEventRegister("GAMG-MIS Graph", cookie, &gamg_setup_stages[SET3]);
581   PetscLogEventRegister("GAMG-MIS-Agg", cookie, &gamg_setup_stages[SET4]);
582   PetscLogEventRegister("GAMG-growSupp", cookie, &gamg_setup_stages[SET5]);
583   PetscLogEventRegister("GAMG-tri-Prol", cookie, &gamg_setup_stages[SET6]);
584   PetscLogEventRegister("GAMG-find-prol", cookie, &gamg_setup_stages[FIND_V]);
585 
586   PetscFunctionReturn(0);
587 }
588 EXTERN_C_END
589