xref: /petsc/src/ksp/pc/impls/gamg/gamg.c (revision 8f1a2a5e609bf3d0ec1c7e7bf09507165b68d274)
1 /*
2  GAMG geometric-algebric multiogrid PC - Mark Adams 2011
3  */
4 #include <../src/ksp/pc/impls/gamg/gamg.h>
5 
PetscLogEvent gamg_setup_stages[NUM_SET]; /* profiling events (SET1..FIND_V) registered in PCCreate_GAMG */
7 
8 /* Private context for the GAMG preconditioner */
typedef struct gamg_TAG {
  PetscInt       m_dim;     /* spatial dimension of the vertex data (set by PCSetCoordinates_GAMG) */
  PetscInt       m_Nlevels; /* number of MG levels built by PCSetUp_GAMG; -1 until setup runs */
  PetscInt       m_data_sz; /* number of local entries in m_data */
  PetscReal     *m_data; /* blocked vector of vertex data on fine grid (coordinates) */
} PC_GAMG;
15 
#define TOP_GRID_LIM 100 /* global-equation threshold: stop coarsening / collapse to one process below this */
17 
18 /* -----------------------------------------------------------------------------*/
19 #undef __FUNCT__
20 #define __FUNCT__ "PCReset_GAMG"
21 PetscErrorCode PCReset_GAMG(PC pc)
22 {
23   PetscErrorCode  ierr;
24   PC_MG           *mg = (PC_MG*)pc->data;
25   PC_GAMG         *pc_gamg = (PC_GAMG*)mg->innerctx;
26 
27   PetscFunctionBegin;
28   ierr = PetscFree(pc_gamg->m_data);CHKERRQ(ierr);
29   pc_gamg->m_data = 0; pc_gamg->m_data_sz = 0;
30   PetscFunctionReturn(0);
31 }
32 
33 /* -------------------------------------------------------------------------- */
34 /*
35    PCSetCoordinates_GAMG
36 
37    Input Parameter:
38    .  pc - the preconditioner context
39 */
40 EXTERN_C_BEGIN
41 #undef __FUNCT__
42 #define __FUNCT__ "PCSetCoordinates_GAMG"
43 PetscErrorCode PCSetCoordinates_GAMG( PC a_pc, PetscInt a_ndm, PetscReal *a_coords )
44 {
45   PC_MG          *mg = (PC_MG*)a_pc->data;
46   PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
47   PetscErrorCode ierr;
48   PetscInt       arrsz,bs,my0,kk,ii,jj,nloc,sz,M;
49   Mat            Amat = a_pc->pmat;
50   PetscBool      useSA = PETSC_FALSE, flag;
51   char           str[16];
52 
53   PetscFunctionBegin;
54   ierr  = MatGetBlockSize( Amat, &bs );               CHKERRQ( ierr );
55   ierr  = MatGetOwnershipRange( Amat, &my0, &kk ); CHKERRQ(ierr);
56   nloc = (kk-my0)/bs; M = kk - my0;
57   if((kk-my0)%bs!=0) SETERRQ1(((PetscObject)Amat)->comm,PETSC_ERR_ARG_WRONG, "Bad local size %d.",nloc);
58 
59   ierr  = PetscOptionsGetString(PETSC_NULL,"-pc_gamg_type",str,16,&flag);    CHKERRQ( ierr );
60   useSA = (PetscBool)(flag && strcmp(str,"sa") == 0);
61 
62   if(a_coords == 0) useSA = PETSC_TRUE; /* use SA if no data */
63   if( !useSA ) sz = a_ndm; /* coordinates */
64   else{ /* SA: null space vectors */
65     if(a_coords != 0) sz = (a_ndm==2 ? 3*bs : 6*bs); /* elasticity */
66     else sz = bs*bs; /* no data, force SA with constant null space vectors */
67     if(a_coords != 0 && bs==1 ) sz = 1; /* scalar problem with coords and SA (not needed) */
68   }
69   arrsz = nloc*sz;
70 
71   /* create data - syntactic sugar that should be refactored at some point */
72   if (!pc_gamg->m_data || (pc_gamg->m_data_sz != arrsz)) {
73     ierr = PetscFree( pc_gamg->m_data );  CHKERRQ(ierr);
74     ierr = PetscMalloc((arrsz+1)*sizeof(double), &pc_gamg->m_data ); CHKERRQ(ierr);
75   }
76   for(kk=0;kk<arrsz;kk++)pc_gamg->m_data[kk] = -999.;
77   pc_gamg->m_data[arrsz] = -99.;
78   /* copy data in - column oriented */
79   if( useSA ) {
80     for(kk=0;kk<nloc;kk++){
81       PetscReal *data = &pc_gamg->m_data[kk*bs];
82       if( sz==1 ) *data = 1.0;
83       else {
84         for(ii=0;ii<bs;ii++)
85 	  for(jj=0;jj<bs;jj++)
86 	    if(ii==jj)data[ii*M + jj] = 1.0; /* translational modes */
87 	    else data[ii*M + jj] = 0.0;
88         if( a_coords != 0 ) {
89           if( a_ndm == 2 ){ /* rotational modes */
90             data += 2*M;
91             data[0] = -a_coords[2*kk+1];
92             data[1] =  a_coords[2*kk];
93           }
94           else {
95             data += 3*M;
96             data[0] = 0.0;               data[M+0] =  a_coords[3*kk+2]; data[2*M+0] = -a_coords[3*kk+1];
97             data[1] = -a_coords[3*kk+2]; data[M+1] = 0.0;               data[2*M+1] =  a_coords[3*kk];
98             data[2] =  a_coords[3*kk+1]; data[M+2] = -a_coords[3*kk];   data[2*M+2] = 0.0;
99           }
100         }
101       }
102     }
103   }
104   else {
105     for( kk = 0 ; kk < nloc ; kk++ ){
106       for( ii = 0 ; ii < a_ndm ; ii++ ) {
107         pc_gamg->m_data[ii*nloc + kk] =  a_coords[kk*a_ndm + ii];
108       }
109     }
110   }
111 
112   assert(pc_gamg->m_data[arrsz] == -99.);
113   for(kk=0;kk<arrsz;kk++){
114     assert(pc_gamg->m_data[kk] != -999.); // debug
115   }
116 
117   pc_gamg->m_data_sz = arrsz;
118   pc_gamg->m_dim = a_ndm;
119 
120   PetscFunctionReturn(0);
121 }
122 EXTERN_C_END
123 
124 /* -------------------------------------------------------------------------- */
125 /*
126    partitionLevel
127 
128    Input Parameter:
129    . a_Amat_fine - matrix on this fine (k) level
130    . a_data_sz - size of data to move (coarse grid)
131    In/Output Parameter:
132    . a_P_inout - prolongation operator to the next level (k-1)
133    . a_coarse_data - data that need to be moved
134    . a_active_proc - number of active procs
135    Output Parameter:
136    . a_Amat_crs - coarse matrix that is created (k-1)
137 */
138 
139 #undef __FUNCT__
140 #define __FUNCT__ "partitionLevel"
141 PetscErrorCode partitionLevel( Mat a_Amat_fine,
142                                PetscInt a_data_sz,
143 			       PetscInt a_cbs,
144                                Mat *a_P_inout,
145                                PetscReal **a_coarse_data,
146                                PetscMPIInt *a_active_proc,
147                                Mat *a_Amat_crs
148                                )
149 {
150   PetscErrorCode   ierr;
151   Mat              Cmat,Pnew,Pold=*a_P_inout;
152   IS               new_indices,isnum;
153   MPI_Comm         wcomm = ((PetscObject)a_Amat_fine)->comm;
154   PetscMPIInt      nactive_procs,mype,npe;
155   PetscInt         Istart,Iend,Istart0,Iend0,ncrs0,ncrs_new,fbs;
156 
157   PetscFunctionBegin;
158   ierr = MPI_Comm_rank(wcomm,&mype);CHKERRQ(ierr);
159   ierr = MPI_Comm_size(wcomm,&npe);CHKERRQ(ierr);
160   ierr = MatGetBlockSize( a_Amat_fine, &fbs ); CHKERRQ(ierr);
161   /* RAP */
162   ierr = MatPtAP( a_Amat_fine, Pold, MAT_INITIAL_MATRIX, 2.0, &Cmat ); CHKERRQ(ierr);
163   ierr = MatSetBlockSize( Cmat, a_cbs );      CHKERRQ(ierr);
164 
165   ierr = MatGetOwnershipRange( Cmat, &Istart0, &Iend0 ); CHKERRQ(ierr);
166   ncrs0 = (Iend0-Istart0)/a_cbs; assert( (Iend0-Istart0)%a_cbs == 0 );
167 
168   /* Repartition Cmat_{k} and move colums of P^{k}_{k-1} and coordinates accordingly */
169   {
170     PetscInt        neq,NN,counts[npe];
171     IS              isnewproc;
172     PetscMPIInt     new_npe,targ_npe;
173 
174     ierr = MatGetSize( Cmat, &neq, &NN );CHKERRQ(ierr);
175 #define MIN_EQ_PROC 100
176     nactive_procs = *a_active_proc;
177     targ_npe = neq/MIN_EQ_PROC; /* hardwire min. number of eq/proc */
178     if( targ_npe == 0 || neq < TOP_GRID_LIM ) new_npe = 1; /* chop coarsest grid */
179     else if (targ_npe >= nactive_procs ) new_npe = nactive_procs; /* no change */
180     else {
181       PetscMPIInt     factstart,fact;
182       new_npe = -9999;
183       factstart = nactive_procs;
184       for(fact=factstart;fact>0;fact--){ /* try to find a better number of procs */
185         if( nactive_procs%fact==0 && neq/(nactive_procs/fact) > MIN_EQ_PROC ) {
186           new_npe = nactive_procs/fact;
187         }
188       }
189       if(new_npe == -9999) new_npe = nactive_procs;
190     }
191     *a_active_proc = new_npe; /* output for next time */
192     { /* partition: get 'isnewproc' */
193       MatPartitioning  mpart;
194       Mat              adj;
195       const PetscInt  *is_idx;
196       PetscInt         is_sz,kk,jj,ii,old_fact=(npe/nactive_procs),new_fact=(npe/new_npe),*isnewproc_idx;
197       /* create sub communicator  */
198       MPI_Comm cm,new_comm;
199       int membershipKey = mype % old_fact;
200       ierr = MPI_Comm_split(wcomm, membershipKey, mype, &cm); CHKERRQ(ierr);
201       ierr = PetscCommDuplicate( cm, &new_comm, PETSC_NULL ); CHKERRQ(ierr);
202       ierr = MPI_Comm_free( &cm );                            CHKERRQ(ierr);
203 
204       /* MatPartitioningApply call MatConvert, which is collective */
205       if( a_cbs==1) {
206         ierr = MatConvert( Cmat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj );   CHKERRQ(ierr);
207       }
208       else {
209 	/* make a scalar matrix to partition */
210         Mat tMat;
211         PetscInt Ii,ncols; const PetscScalar *vals; const PetscInt *idx;
212         ierr = MatCreateMPIAIJ( wcomm, ncrs0, ncrs0,
213                                 PETSC_DETERMINE, PETSC_DETERMINE,
214                                 25, PETSC_NULL, 10, PETSC_NULL,
215                                 &tMat );
216 
217         for ( Ii = Istart0; Ii < Iend0; Ii++ ) {
218           PetscInt dest_row = Ii/a_cbs;
219           ierr = MatGetRow(Cmat,Ii,&ncols,&idx,&vals); CHKERRQ(ierr);
220           for( jj = 0 ; jj < ncols ; jj++ ){
221             PetscInt dest_col = idx[jj]/a_cbs;
222             PetscScalar v = 1.0;
223             ierr = MatSetValues(tMat,1,&dest_row,1,&dest_col,&v,ADD_VALUES); CHKERRQ(ierr);
224           }
225           ierr = MatRestoreRow(Cmat,Ii,&ncols,&idx,&vals); CHKERRQ(ierr);
226         }
227         ierr = MatAssemblyBegin(tMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
228         ierr = MatAssemblyEnd(tMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
229 
230         ierr = MatConvert( tMat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj );   CHKERRQ(ierr);
231 
232         ierr = MatDestroy( &tMat );  CHKERRQ(ierr);
233       }
234       if( membershipKey == 0 ) {
235 	if(ncrs0==0)SETERRQ(wcomm,PETSC_ERR_LIB,"zero local nodes -- increase min");
236         /* hack to fix global data that pmetis.c uses in 'adj' */
237         for( kk=0 , jj=0 ; kk<=npe ; jj++, kk += old_fact ) {
238           adj->rmap->range[jj] = adj->rmap->range[kk];
239         }
240         ierr = MatPartitioningCreate( new_comm, &mpart ); CHKERRQ(ierr);
241         ierr = MatPartitioningSetAdjacency( mpart, adj ); CHKERRQ(ierr);
242         ierr = MatPartitioningSetFromOptions( mpart ); CHKERRQ(ierr);
243         ierr = MatPartitioningSetNParts( mpart, new_npe ); CHKERRQ(ierr);
244         ierr = MatPartitioningApply( mpart, &isnewproc ); CHKERRQ(ierr);
245         ierr = MatPartitioningDestroy( &mpart ); CHKERRQ(ierr);
246         /* collect IS info */
247         ierr = ISGetLocalSize( isnewproc, &is_sz );        CHKERRQ(ierr);
248         ierr = PetscMalloc( a_cbs*is_sz*sizeof(PetscInt), &isnewproc_idx ); CHKERRQ(ierr);
249         ierr = ISGetIndices( isnewproc, &is_idx );     CHKERRQ(ierr);
250         /* spread partitioning across machine - probably the right thing to do but machine spec. */
251         for(kk=0,jj=0;kk<is_sz;kk++){
252           for(ii=0 ; ii<a_cbs ; ii++, jj++ ) { /* expand for equation level by 'bs' */
253             isnewproc_idx[jj] = is_idx[kk] * new_fact;
254           }
255         }
256         ierr = ISRestoreIndices( isnewproc, &is_idx );     CHKERRQ(ierr);
257         ierr = ISDestroy( &isnewproc );                    CHKERRQ(ierr);
258         is_sz *= a_cbs;
259       }
260       else {
261         isnewproc_idx = 0;
262         is_sz = 0;
263       }
264       ierr = MatDestroy( &adj );                       CHKERRQ(ierr);
265       ierr = MPI_Comm_free( &new_comm );    CHKERRQ(ierr);
266       ierr = ISCreateGeneral( wcomm, is_sz, isnewproc_idx, PETSC_COPY_VALUES, &isnewproc );
267       if( membershipKey == 0 ) {
268         ierr = PetscFree( isnewproc_idx );  CHKERRQ(ierr);
269       }
270     }
271     /*
272      Create an index set from the isnewproc index set to indicate the mapping TO
273      */
274     ierr = ISPartitioningToNumbering( isnewproc, &isnum ); CHKERRQ(ierr);
275     /*
276      Determine how many elements are assigned to each processor
277      */
278     ierr = ISPartitioningCount( isnewproc, npe, counts ); CHKERRQ(ierr);
279     ierr = ISDestroy( &isnewproc );                       CHKERRQ(ierr);
280     ncrs_new = counts[mype]/a_cbs;
281   }
282 
283   { /* Create a vector to contain the newly ordered element information */
284     const PetscInt *idx;
285     PetscInt        ii,jj,kk;
286     IS              isscat;
287     PetscScalar    *array;
288     Vec             src_crd, dest_crd;
289     PetscReal      *data = *a_coarse_data;
290     VecScatter      vecscat;
291     PetscInt        tidx[ncrs0*a_data_sz];
292 
293     ierr = VecCreate( wcomm, &dest_crd );
294     ierr = VecSetSizes( dest_crd, a_data_sz*ncrs_new, PETSC_DECIDE ); CHKERRQ(ierr);
295     ierr = VecSetFromOptions( dest_crd ); CHKERRQ(ierr); /*funny vector-get global options?*/
296     /*
297      There are 'a_data_sz' data items per node, (one can think of the vectors of having a
298      block size of 'a_data_sz').  Note, ISs are expanded into equation space by 'a_cbs'.
299      */
300     ierr = ISGetIndices( isnum, &idx ); CHKERRQ(ierr);
301     for(ii=0,jj=0; ii<ncrs0 ; ii++) {
302       PetscInt id = idx[ii*a_cbs]/a_cbs;
303       for( kk=0; kk<a_data_sz ; kk++, jj++) tidx[jj] = id*a_data_sz + kk;
304     }
305     ierr = ISRestoreIndices( isnum, &idx ); CHKERRQ(ierr);
306     ierr = ISCreateGeneral( wcomm, a_data_sz*ncrs0, tidx, PETSC_COPY_VALUES, &isscat );
307     CHKERRQ(ierr);
308     /*
309      Create a vector to contain the original vertex information for each element
310      */
311     ierr = VecCreateSeq( PETSC_COMM_SELF, a_data_sz*ncrs0, &src_crd ); CHKERRQ(ierr);
312     for(ii=0, jj=0; ii<ncrs0 ; ii++) {
313       for( kk=0; kk < a_data_sz ; kk++, jj++) {
314         ierr = VecSetValues( src_crd, 1, &jj, &data[kk*ncrs0 + ii], INSERT_VALUES );  CHKERRQ(ierr);
315       }
316     }
317     ierr = VecAssemblyBegin(src_crd); CHKERRQ(ierr);
318     ierr = VecAssemblyEnd(src_crd); CHKERRQ(ierr);
319     /*
320      Scatter the element vertex information (still in the original vertex ordering)
321      to the correct processor
322      */
323     ierr = VecScatterCreate( src_crd, PETSC_NULL, dest_crd, isscat, &vecscat);
324     CHKERRQ(ierr);
325     ierr = ISDestroy( &isscat );       CHKERRQ(ierr);
326     ierr = VecScatterBegin(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
327     ierr = VecScatterEnd(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
328     ierr = VecScatterDestroy( &vecscat );       CHKERRQ(ierr);
329     ierr = VecDestroy( &src_crd );       CHKERRQ(ierr);
330     /*
331      Put the element vertex data into a new allocation of the gdata->ele
332      */
333     ierr = PetscFree( *a_coarse_data );    CHKERRQ(ierr);
334     ierr = PetscMalloc( a_data_sz*ncrs_new*sizeof(PetscReal), a_coarse_data );    CHKERRQ(ierr);
335     VecGetLocalSize( dest_crd, &ii ); assert(ii==a_data_sz*ncrs_new);
336 
337     ierr = VecGetArray( dest_crd, &array );    CHKERRQ(ierr);
338     data = *a_coarse_data;
339     for(ii=0, jj=0; ii<ncrs_new ; ii++) {
340       for( kk=0; kk < a_data_sz ; kk++, jj++) {
341         data[kk*ncrs_new + ii] = PetscRealPart(array[jj]);
342       }
343     }
344     ierr = VecRestoreArray( dest_crd, &array );    CHKERRQ(ierr);
345     ierr = VecDestroy( &dest_crd );    CHKERRQ(ierr);
346   }
347   /*
348    Invert for MatGetSubMatrix
349    */
350   ierr = ISInvertPermutation( isnum, ncrs_new*a_cbs, &new_indices ); CHKERRQ(ierr);
351   ierr = ISSort( new_indices ); CHKERRQ(ierr); /* is this needed? */
352   ierr = ISDestroy( &isnum ); CHKERRQ(ierr);
353   /* A_crs output */
354   ierr = MatGetSubMatrix( Cmat, new_indices, new_indices, MAT_INITIAL_MATRIX, a_Amat_crs );
355   CHKERRQ(ierr);
356 
357   ierr = MatDestroy( &Cmat ); CHKERRQ(ierr);
358   Cmat = *a_Amat_crs;
359   ierr = MatSetBlockSize( Cmat, a_cbs );      CHKERRQ(ierr);
360 
361   /* prolongator */
362   ierr = MatGetOwnershipRange( Pold, &Istart, &Iend );    CHKERRQ(ierr);
363   {
364     IS findices;
365     ierr = ISCreateStride(wcomm,Iend-Istart,Istart,1,&findices);   CHKERRQ(ierr);
366     ierr = MatGetSubMatrix( Pold, findices, new_indices, MAT_INITIAL_MATRIX, &Pnew );
367     CHKERRQ(ierr);
368     ierr = ISDestroy( &findices ); CHKERRQ(ierr);
369   }
370   ierr = MatDestroy( a_P_inout ); CHKERRQ(ierr);
371   *a_P_inout = Pnew; /* output */
372   ierr = ISDestroy( &new_indices ); CHKERRQ(ierr);
373 
374   PetscFunctionReturn(0);
375 }
376 
#define GAMG_MAXLEVELS 2 /* hard-wired max number of MG levels; sizes the per-level arrays in PCSetUp_GAMG */
#if defined(PETSC_USE_LOG)
PetscLogStage  gamg_stages[20]; /* optional per-level log stages (registration currently disabled in PCSetUp_GAMG) */
#endif
381 /* -------------------------------------------------------------------------- */
382 /*
383    PCSetUp_GAMG - Prepares for the use of the GAMG preconditioner
384                     by setting data structures and options.
385 
386    Input Parameter:
387 .  pc - the preconditioner context
388 
389    Application Interface Routine: PCSetUp()
390 
391    Notes:
392    The interface routine PCSetUp() is not usually called directly by
393    the user, but instead is called by PCApply() if necessary.
394 */
395 #undef __FUNCT__
396 #define __FUNCT__ "PCSetUp_GAMG"
397 PetscErrorCode PCSetUp_GAMG( PC a_pc )
398 {
399   PetscErrorCode  ierr;
400   PC_MG           *mg = (PC_MG*)a_pc->data;
401   PC_GAMG         *pc_gamg = (PC_GAMG*)mg->innerctx;
402   Mat              Amat = a_pc->mat, Pmat = a_pc->pmat;
403   PetscInt         fine_level, level, level1, M, N, bs, nloc, lidx, data_sz, Istart, Iend;
404   MPI_Comm         wcomm = ((PetscObject)a_pc)->comm;
405   PetscMPIInt      mype,npe,nactivepe;
406   PetscBool        isOK;
407   Mat              Aarr[GAMG_MAXLEVELS], Parr[GAMG_MAXLEVELS];
408   PetscReal       *coarse_data = 0, *data, emaxs[GAMG_MAXLEVELS];
409 
410   PetscFunctionBegin;
411   if( a_pc->setupcalled ) {
412     /* no state data in GAMG to destroy */
413     ierr = PCReset_MG( a_pc ); CHKERRQ(ierr);
414   }
415   ierr = MPI_Comm_rank(wcomm,&mype);CHKERRQ(ierr);
416   ierr = MPI_Comm_size(wcomm,&npe);CHKERRQ(ierr);
417   /* GAMG requires input of fine-grid matrix. It determines nlevels. */
418   ierr = MatGetBlockSize( Amat, &bs ); CHKERRQ(ierr);
419   ierr = MatGetSize( Amat, &M, &N );CHKERRQ(ierr);
420   ierr = MatGetOwnershipRange( Amat, &Istart, &Iend ); CHKERRQ(ierr);
421   nloc = (Iend-Istart)/bs; assert((Iend-Istart)%bs == 0);
422 
423   /* get data of not around */
424   if( pc_gamg->m_data == 0 && nloc > 0 ) {
425     ierr  = PCSetCoordinates_GAMG( a_pc, -1, 0 );    CHKERRQ( ierr );
426   }
427   data = pc_gamg->m_data;
428   data_sz = pc_gamg->m_data_sz/nloc; assert(data!=0 || data_sz==0);
429 
430   /* Get A_i and R_i */
431 PetscPrintf(PETSC_COMM_WORLD,"\t[%d]%s level %d N=%d, data size=%d m_data_sz=%d nloc=%d\n",0,__FUNCT__,0,N,data_sz,pc_gamg->m_data_sz,nloc);
432   for ( level=0, Aarr[0] = Pmat, nactivepe = npe; /* hard wired stopping logic */
433         level < GAMG_MAXLEVELS-1 && (level==0 || M/bs>TOP_GRID_LIM) && (npe==1 || nactivepe>1);
434         level++ ) {
435     level1 = level + 1;
436     ierr = PetscLogEventBegin(gamg_setup_stages[SET1],0,0,0,0);CHKERRQ(ierr);
437     ierr = createProlongation( Aarr[level], data, pc_gamg->m_dim, &bs, &data_sz,
438                                &Parr[level1], &coarse_data, &isOK, &emaxs[level] );
439     CHKERRQ(ierr);
440     ierr = PetscLogEventEnd(gamg_setup_stages[SET1],0,0,0,0);CHKERRQ(ierr);
441 
442     ierr = PetscFree( data ); CHKERRQ( ierr );
443     if(level==0) Aarr[0] = Amat; /* use Pmat for finest level setup, but use mat for solver */
444     if( isOK ) {
445       ierr = PetscLogEventBegin(gamg_setup_stages[SET2],0,0,0,0);CHKERRQ(ierr);
446       ierr = partitionLevel( Aarr[level], data_sz, bs,
447                              &Parr[level1], &coarse_data, &nactivepe, &Aarr[level1] );
448       CHKERRQ(ierr);
449       ierr = PetscLogEventEnd(gamg_setup_stages[SET2],0,0,0,0);CHKERRQ(ierr);
450       ierr = MatGetSize( Aarr[level1], &M, &N );CHKERRQ(ierr);
451 PetscPrintf(PETSC_COMM_WORLD,"\t\t[%d]%s %d) N=%d, %d data size, %d active pes\n",0,__FUNCT__,level1,N,data_sz,nactivepe);
452     }
453     else{
454       break;
455     }
456     data = coarse_data;
457   }
458   ierr = PetscFree( coarse_data ); CHKERRQ( ierr );
459 PetscPrintf(PETSC_COMM_WORLD,"\t[%d]%s %d levels\n",0,__FUNCT__,level + 1);
460   pc_gamg->m_data = 0; /* destroyed coordinate data */
461   pc_gamg->m_Nlevels = level + 1;
462   fine_level = level;
463   ierr = PCMGSetLevels(a_pc,pc_gamg->m_Nlevels,PETSC_NULL);CHKERRQ(ierr);
464 
465   /* set default smoothers */
466   for ( lidx=1, level = pc_gamg->m_Nlevels-2;
467         lidx <= fine_level;
468         lidx++, level--) {
469     PetscReal emax, emin;
470     KSP smoother; PC subpc;
471     ierr = PCMGGetSmoother( a_pc, lidx, &smoother ); CHKERRQ(ierr);
472     ierr = KSPSetType( smoother, KSPCHEBYCHEV );CHKERRQ(ierr);
473     if( emaxs[level] > 0.0 ) emax = emaxs[level];
474     else{ /* eigen estimate 'emax' */
475       KSP eksp; Mat Lmat = Aarr[level];
476       Vec bb, xx; PC pc;
477 
478       ierr = MatGetVecs( Lmat, &bb, 0 );         CHKERRQ(ierr);
479       ierr = MatGetVecs( Lmat, &xx, 0 );         CHKERRQ(ierr);
480       {
481 	PetscRandom    rctx;
482 	ierr = PetscRandomCreate(wcomm,&rctx);CHKERRQ(ierr);
483 	ierr = PetscRandomSetFromOptions(rctx);CHKERRQ(ierr);
484 	ierr = VecSetRandom(bb,rctx);CHKERRQ(ierr);
485 	ierr = PetscRandomDestroy( &rctx ); CHKERRQ(ierr);
486       }
487       ierr = KSPCreate(wcomm,&eksp);CHKERRQ(ierr);
488       ierr = KSPSetInitialGuessNonzero( eksp, PETSC_FALSE ); CHKERRQ(ierr);
489       ierr = KSPSetOperators( eksp, Lmat, Lmat, DIFFERENT_NONZERO_PATTERN ); CHKERRQ( ierr );
490       ierr = KSPGetPC( eksp, &pc );CHKERRQ( ierr );
491       ierr = PCSetType( pc, PCJACOBI ); CHKERRQ(ierr); /* should be same as above */
492       ierr = KSPSetTolerances( eksp, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT, 10 );
493       CHKERRQ(ierr);
494       ierr = KSPSetConvergenceTest( eksp, KSPSkipConverged, 0, 0 ); CHKERRQ(ierr);
495       ierr = KSPSetNormType( eksp, KSP_NORM_NONE );                 CHKERRQ(ierr);
496 
497       ierr = KSPSetComputeSingularValues( eksp,PETSC_TRUE ); CHKERRQ(ierr);
498       ierr = KSPSolve( eksp, bb, xx ); CHKERRQ(ierr);
499       ierr = KSPComputeExtremeSingularValues( eksp, &emax, &emin ); CHKERRQ(ierr);
500       ierr = VecDestroy( &xx );       CHKERRQ(ierr);
501       ierr = VecDestroy( &bb );       CHKERRQ(ierr);
502       ierr = KSPDestroy( &eksp );       CHKERRQ(ierr);
503     }
504     {
505       PetscInt N1, N0, tt;
506       ierr = MatGetSize( Aarr[level], &N1, &tt );         CHKERRQ(ierr);
507       ierr = MatGetSize( Aarr[level+1], &N0, &tt );       CHKERRQ(ierr);
508       emin = 1.5*emax/((PetscReal)N1/(PetscReal)N0); /* this should be about the coarsening rate */
509       PetscPrintf(PETSC_COMM_WORLD,"\t\t\t%s max eigen = %e min = %e (N=%d)\n",__FUNCT__,emax,emin,N1/bs);
510       emax *= 1.05;
511     }
512 
513     ierr = KSPSetOperators( smoother, Aarr[level], Aarr[level], DIFFERENT_NONZERO_PATTERN );
514     ierr = KSPChebychevSetEigenvalues( smoother, emax, emin );CHKERRQ(ierr);
515     ierr = KSPGetPC( smoother, &subpc ); CHKERRQ(ierr);
516     ierr = PCSetType( subpc, PCJACOBI ); CHKERRQ(ierr);
517     ierr = KSPSetNormType( smoother, KSP_NORM_NONE ); CHKERRQ(ierr);
518   }
519   {
520     KSP smoother; /* coarse grid */
521     Mat Lmat = Aarr[pc_gamg->m_Nlevels-1];
522     ierr = PCMGGetSmoother( a_pc, 0, &smoother ); CHKERRQ(ierr);
523     ierr = KSPSetOperators( smoother, Lmat, Lmat, DIFFERENT_NONZERO_PATTERN );
524     CHKERRQ(ierr);
525     ierr = KSPSetNormType( smoother, KSP_NORM_NONE ); CHKERRQ(ierr);
526   }
527   /* should be called in PCSetFromOptions_GAMG(), but cannot be called prior to PCMGSetLevels() */
528   ierr = PCSetFromOptions_MG(a_pc); CHKERRQ(ierr);
529   {
530     PetscBool galerkin;
531     ierr = PCMGGetGalerkin( a_pc,  &galerkin); CHKERRQ(ierr);
532     if(galerkin){
533       SETERRQ(wcomm,PETSC_ERR_ARG_WRONG, "GAMG does galerkin manually so it must not be used in PC_MG.");
534     }
535   }
536 
537   /* set interpolation between the levels, create timer stages, clean up */
538   if( PETSC_FALSE ) {
539     char str[32];
540     sprintf(str,"MG Level %d (%d)",0,pc_gamg->m_Nlevels-1);
541     PetscLogStageRegister(str, &gamg_stages[fine_level]);
542   }
543   for (lidx=0,level=pc_gamg->m_Nlevels-1;
544        lidx<fine_level;
545        lidx++, level--){
546     ierr = PCMGSetInterpolation( a_pc, lidx+1, Parr[level] );CHKERRQ(ierr);
547     if( !PETSC_TRUE ) {
548       PetscViewer viewer; char fname[32];
549       sprintf(fname,"Pmat_%d.m",level);
550       ierr = PetscViewerASCIIOpen( wcomm, fname, &viewer );  CHKERRQ(ierr);
551       ierr = PetscViewerSetFormat( viewer, PETSC_VIEWER_ASCII_MATLAB);  CHKERRQ(ierr);
552       ierr = MatView( Parr[level], viewer ); CHKERRQ(ierr);
553       ierr = PetscViewerDestroy( &viewer );
554     }
555     ierr = MatDestroy( &Parr[level] );  CHKERRQ(ierr);
556     ierr = MatDestroy( &Aarr[level] );  CHKERRQ(ierr);
557     if( PETSC_FALSE ) {
558       char str[32];
559       sprintf(str,"MG Level %d (%d)",lidx+1,level-1);
560       PetscLogStageRegister(str, &gamg_stages[level-1]);
561     }
562   }
563 
564   /* setupcalled is set to 0 so that MG is setup from scratch */
565   a_pc->setupcalled = 0;
566   ierr = PCSetUp_MG(a_pc);CHKERRQ(ierr);
567   PetscFunctionReturn(0);
568 }
569 
570 /* ------------------------------------------------------------------------- */
571 /*
572    PCDestroy_GAMG - Destroys the private context for the GAMG preconditioner
573    that was created with PCCreate_GAMG().
574 
575    Input Parameter:
576 .  pc - the preconditioner context
577 
578    Application Interface Routine: PCDestroy()
579 */
580 #undef __FUNCT__
581 #define __FUNCT__ "PCDestroy_GAMG"
582 PetscErrorCode PCDestroy_GAMG(PC pc)
583 {
584   PetscErrorCode  ierr;
585   PC_MG           *mg = (PC_MG*)pc->data;
586   PC_GAMG         *pc_gamg= (PC_GAMG*)mg->innerctx;
587 
588   PetscFunctionBegin;
589   ierr = PCReset_GAMG(pc);CHKERRQ(ierr);
590   ierr = PetscFree(pc_gamg);CHKERRQ(ierr);
591   ierr = PCDestroy_MG(pc);CHKERRQ(ierr);
592   PetscFunctionReturn(0);
593 }
594 
595 #undef __FUNCT__
596 #define __FUNCT__ "PCSetFromOptions_GAMG"
597 PetscErrorCode PCSetFromOptions_GAMG(PC pc)
598 {
599   /* PetscErrorCode  ierr; */
600   /* PC_MG           *mg = (PC_MG*)pc->data; */
601   /* PC_GAMG         *pc_gamg = (PC_GAMG*)mg->innerctx; */
602   /* MPI_Comm        comm = ((PetscObject)pc)->comm; */
603 
604   PetscFunctionBegin;
605   PetscFunctionReturn(0);
606 }
607 
608 /* -------------------------------------------------------------------------- */
609 /*
610  PCCreate_GAMG - Creates a GAMG preconditioner context, PC_GAMG
611 
612    Input Parameter:
613 .  pc - the preconditioner context
614 
615    Application Interface Routine: PCCreate()
616 
617   */
618  /* MC
619      PCGAMG - Use algebraic multigrid preconditioning. This preconditioner requires you provide
620        fine grid discretization matrix and coordinates on the fine grid.
621 
622    Options Database Key:
623    Multigrid options(inherited)
624 +  -pc_mg_cycles <1>: 1 for V cycle, 2 for W-cycle (MGSetCycles)
625 .  -pc_mg_smoothup <1>: Number of post-smoothing steps (MGSetNumberSmoothUp)
626 .  -pc_mg_smoothdown <1>: Number of pre-smoothing steps (MGSetNumberSmoothDown)
627    -pc_mg_type <multiplicative>: (one of) additive multiplicative full cascade kascade
628    GAMG options:
629 
630    Level: intermediate
631   Concepts: multigrid
632 
633 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC, PCMGType,
634            PCMGSetLevels(), PCMGGetLevels(), PCMGSetType(), MPSetCycles(), PCMGSetNumberSmoothDown(),
635            PCMGSetNumberSmoothUp(), PCMGGetCoarseSolve(), PCMGSetResidual(), PCMGSetInterpolation(),
636            PCMGSetRestriction(), PCMGGetSmoother(), PCMGGetSmootherUp(), PCMGGetSmootherDown(),
637            PCMGSetCyclesOnLevel(), PCMGSetRhs(), PCMGSetX(), PCMGSetR()
638 M */
639 
640 EXTERN_C_BEGIN
641 #undef __FUNCT__
642 #define __FUNCT__ "PCCreate_GAMG"
643 PetscErrorCode  PCCreate_GAMG(PC pc)
644 {
645   PetscErrorCode  ierr;
646   PC_GAMG         *pc_gamg;
647   PC_MG           *mg;
648   PetscClassId     cookie;
649 
650   PetscFunctionBegin;
651   /* PCGAMG is an inherited class of PCMG. Initialize pc as PCMG */
652   ierr = PCSetType(pc,PCMG);CHKERRQ(ierr); /* calls PCCreate_MG() and MGCreate_Private() */
653   ierr = PetscObjectChangeTypeName((PetscObject)pc,PCGAMG);CHKERRQ(ierr);
654 
655   /* create a supporting struct and attach it to pc */
656   ierr = PetscNewLog(pc,PC_GAMG,&pc_gamg);CHKERRQ(ierr);
657   mg = (PC_MG*)pc->data;
658   mg->innerctx = pc_gamg;
659 
660   pc_gamg->m_Nlevels    = -1;
661 
662   /* overwrite the pointers of PCMG by the functions of PCGAMG */
663   pc->ops->setfromoptions = PCSetFromOptions_GAMG;
664   pc->ops->setup          = PCSetUp_GAMG;
665   pc->ops->reset          = PCReset_GAMG;
666   pc->ops->destroy        = PCDestroy_GAMG;
667 
668   ierr = PetscObjectComposeFunctionDynamic( (PetscObject)pc,
669 					    "PCSetCoordinates_C",
670 					    "PCSetCoordinates_GAMG",
671 					    PCSetCoordinates_GAMG);CHKERRQ(ierr);
672 
673   PetscClassIdRegister("GAMG Setup",&cookie);
674   PetscLogEventRegister("GAMG-createProl", cookie, &gamg_setup_stages[SET1]);
675   PetscLogEventRegister("GAMG-partLevel", cookie, &gamg_setup_stages[SET2]);
676   PetscLogEventRegister("GAMG-MIS Graph", cookie, &gamg_setup_stages[SET3]);
677   PetscLogEventRegister("GAMG-MIS-Agg", cookie, &gamg_setup_stages[SET4]);
678   PetscLogEventRegister("GAMG-growSupp", cookie, &gamg_setup_stages[SET5]);
679   PetscLogEventRegister("GAMG-tri-Prol", cookie, &gamg_setup_stages[SET6]);
680   PetscLogEventRegister("GAMG-find-prol", cookie, &gamg_setup_stages[FIND_V]);
681 
682   PetscFunctionReturn(0);
683 }
684 EXTERN_C_END
685