xref: /petsc/src/ksp/pc/impls/gamg/gamg.c (revision d461ba9780c55a0df6dc85ac9e4e0589fbf99e79)
1 /*
 GAMG geometric-algebraic multigrid PC - Mark Adams 2011
3  */
4 #include <../src/ksp/pc/impls/gamg/gamg.h>
5 
/* profiling events for the GAMG setup phases; registered in PCCreate_GAMG() */
PetscLogEvent gamg_setup_stages[NUM_SET];
7 
/* Private context for the GAMG preconditioner (attached to PC_MG as 'innerctx') */
typedef struct gamg_TAG {
  PetscInt       m_dim;       /* spatial dimension of the fine-grid vertex data */
  PetscInt       m_Nlevels;   /* number of multigrid levels built by PCSetUp_GAMG */
  PetscInt       m_data_sz;   /* total number of entries in m_data */
  PetscInt       m_data_rows; /* rows of data per node: matrix block size for SA, 1 otherwise */
  PetscInt       m_data_cols; /* columns of data per node: dim for coordinates, or # null-space vectors for SA */
  PetscBool      m_useSA;     /* PETSC_TRUE: smoothed aggregation; PETSC_FALSE: geometric (coordinate) coarsening */
  PetscReal     *m_data; /* blocked vector of vertex data on fine grid (coordinates) */
} PC_GAMG;
18 
19 /* -------------------------------------------------------------------------- */
20 /*
21    PCSetCoordinates_GAMG
22 
23    Input Parameter:
24    .  pc - the preconditioner context
25 */
26 EXTERN_C_BEGIN
27 #undef __FUNCT__
28 #define __FUNCT__ "PCSetCoordinates_GAMG"
29 PetscErrorCode PCSetCoordinates_GAMG( PC a_pc, PetscInt a_ndm, PetscReal *a_coords )
30 {
31   PC_MG          *mg = (PC_MG*)a_pc->data;
32   PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
33   PetscErrorCode ierr;
34   PetscInt       arrsz,bs,my0,kk,ii,jj,nloc,Iend;
35   Mat            Amat = a_pc->pmat;
36   PetscBool      flag;
37   char           str[16];
38 
39   PetscFunctionBegin;
40   ierr  = MatGetBlockSize( Amat, &bs );               CHKERRQ( ierr );
41   ierr  = MatGetOwnershipRange( Amat, &my0, &Iend ); CHKERRQ(ierr);
42   nloc = (Iend-my0)/bs;
43   if((Iend-my0)%bs!=0) SETERRQ1(((PetscObject)Amat)->comm,PETSC_ERR_ARG_WRONG, "Bad local size %d.",nloc);
44 
45   ierr  = PetscOptionsGetString(PETSC_NULL,"-pc_gamg_type",str,16,&flag);    CHKERRQ( ierr );
46   pc_gamg->m_useSA = (PetscBool)(flag && strcmp(str,"sa") == 0);
47 
48   pc_gamg->m_data_rows = 1;
49   if(a_coords == 0) pc_gamg->m_useSA = PETSC_TRUE; /* use SA if no data */
50   if( !pc_gamg->m_useSA ) pc_gamg->m_data_cols = a_ndm; /* coordinates */
51   else{ /* SA: null space vectors */
52     if(a_coords != 0 && bs==1 ) pc_gamg->m_data_cols = 1; /* scalar w/ coords and SA (not needed) */
53     else if(a_coords != 0) pc_gamg->m_data_cols = (a_ndm==2 ? 3 : 6); /* elasticity */
54     else pc_gamg->m_data_cols = bs; /* no data, force SA with constant null space vectors */
55     pc_gamg->m_data_rows = bs;
56   }
57   arrsz = nloc*pc_gamg->m_data_rows*pc_gamg->m_data_cols;
58 
59   /* create data - syntactic sugar that should be refactored at some point */
60   if (!pc_gamg->m_data || (pc_gamg->m_data_sz != arrsz)) {
61     ierr = PetscFree( pc_gamg->m_data );  CHKERRQ(ierr);
62     ierr = PetscMalloc((arrsz+1)*sizeof(double), &pc_gamg->m_data ); CHKERRQ(ierr);
63   }
64   for(kk=0;kk<arrsz;kk++)pc_gamg->m_data[kk] = -999.;
65   pc_gamg->m_data[arrsz] = -99.;
66   /* copy data in - column oriented */
67   if( pc_gamg->m_useSA ) {
68     const PetscInt M = Iend - my0;
69     for(kk=0;kk<nloc;kk++){
70       PetscReal *data = &pc_gamg->m_data[kk*bs];
71       if( pc_gamg->m_data_cols==1 ) *data = 1.0;
72       else {
73         for(ii=0;ii<bs;ii++)
74 	  for(jj=0;jj<bs;jj++)
75 	    if(ii==jj)data[ii*M + jj] = 1.0; /* translational modes */
76 	    else data[ii*M + jj] = 0.0;
77         if( a_coords != 0 ) {
78           if( a_ndm == 2 ){ /* rotational modes */
79             data += 2*M;
80             data[0] = -a_coords[2*kk+1];
81             data[1] =  a_coords[2*kk];
82           }
83           else {
84             data += 3*M;
85             data[0] = 0.0;               data[M+0] =  a_coords[3*kk+2]; data[2*M+0] = -a_coords[3*kk+1];
86             data[1] = -a_coords[3*kk+2]; data[M+1] = 0.0;               data[2*M+1] =  a_coords[3*kk];
87             data[2] =  a_coords[3*kk+1]; data[M+2] = -a_coords[3*kk];   data[2*M+2] = 0.0;
88           }
89         }
90       }
91     }
92   }
93   else {
94     for( kk = 0 ; kk < nloc ; kk++ ){
95       for( ii = 0 ; ii < a_ndm ; ii++ ) {
96         pc_gamg->m_data[ii*nloc + kk] =  a_coords[kk*a_ndm + ii];
97       }
98     }
99   }
100 
101   assert(pc_gamg->m_data[arrsz] == -99.);
102   for(kk=0;kk<arrsz;kk++) assert(pc_gamg->m_data[kk] != -999.); // debug
103 
104   pc_gamg->m_data_sz = arrsz;
105   pc_gamg->m_dim = a_ndm;
106 
107   PetscFunctionReturn(0);
108 }
109 EXTERN_C_END
110 
111 
112 /* -----------------------------------------------------------------------------*/
113 #undef __FUNCT__
114 #define __FUNCT__ "PCReset_GAMG"
115 PetscErrorCode PCReset_GAMG(PC pc)
116 {
117   PetscErrorCode  ierr;
118   PC_MG           *mg = (PC_MG*)pc->data;
119   PC_GAMG         *pc_gamg = (PC_GAMG*)mg->innerctx;
120 
121   PetscFunctionBegin;
122   ierr = PetscFree(pc_gamg->m_data);CHKERRQ(ierr);
123   pc_gamg->m_data = 0; pc_gamg->m_data_sz = 0;
124   PetscFunctionReturn(0);
125 }
126 
127 /* -------------------------------------------------------------------------- */
128 /*
129    partitionLevel
130 
131    Input Parameter:
132    . a_Amat_fine - matrix on this fine (k) level
133    . a_ndata_rows - size of data to move (coarse grid)
134    . a_ndata_cols - size of data to move (coarse grid)
135    In/Output Parameter:
136    . a_P_inout - prolongation operator to the next level (k-1)
137    . a_coarse_data - data that need to be moved
138    . a_active_proc - number of active procs
139    Output Parameter:
140    . a_Amat_crs - coarse matrix that is created (k-1)
141 */
142 
#undef __FUNCT__
#define __FUNCT__ "partitionLevel"
/*
   partitionLevel - form the Galerkin coarse operator Cmat = P^T A P, then
   repartition it onto a (possibly smaller) set of active processes, moving
   the columns of the prolongator and the per-node vertex data accordingly.
   Collective on the communicator of a_Amat_fine.
*/
PetscErrorCode partitionLevel( Mat a_Amat_fine,
                               PetscInt a_ndata_rows,
                               PetscInt a_ndata_cols,
			       PetscInt a_cbs,
                               Mat *a_P_inout,
                               PetscReal **a_coarse_data,
                               PetscMPIInt *a_active_proc,
                               Mat *a_Amat_crs
                               )
{
  PetscErrorCode   ierr;
  Mat              Cmat,Pnew,Pold=*a_P_inout;
  IS               new_indices,isnum;
  MPI_Comm         wcomm = ((PetscObject)a_Amat_fine)->comm;
  PetscMPIInt      nactive_procs,mype,npe;
  PetscInt         Istart,Iend,Istart0,Iend0,ncrs0,ncrs_new,fbs;
  PetscInt         neq,NN;
  PetscMPIInt      new_npe,targ_npe;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank( wcomm, &mype ); CHKERRQ(ierr);
  ierr = MPI_Comm_size( wcomm, &npe );  CHKERRQ(ierr);
  ierr = MatGetBlockSize( a_Amat_fine, &fbs ); CHKERRQ(ierr);
  /* RAP: Galerkin coarse grid operator */
  ierr = MatPtAP( a_Amat_fine, Pold, MAT_INITIAL_MATRIX, 2.0, &Cmat ); CHKERRQ(ierr);
  ierr = MatSetBlockSize( Cmat, a_cbs );      CHKERRQ(ierr);

  ierr = MatGetOwnershipRange( Cmat, &Istart0, &Iend0 ); CHKERRQ(ierr);
  ncrs0 = (Iend0-Istart0)/a_cbs; assert((Iend0-Istart0)%a_cbs == 0); /* local coarse nodes */

  /* Repartition Cmat_{k} and move columns of P^{k}_{k-1} and coordinates accordingly */
  ierr = MatGetSize( Cmat, &neq, &NN );CHKERRQ(ierr);
#define MIN_EQ_PROC 200
  nactive_procs = *a_active_proc;
  targ_npe = neq/MIN_EQ_PROC; /* hardwire min. number of eq/proc */
#define TOP_GRID_LIM 1000
  if( targ_npe == 0 || neq < TOP_GRID_LIM ) new_npe = 1; /* chop coarsest grid */
  else if (targ_npe >= nactive_procs ) new_npe = nactive_procs; /* no change */
  else {
    PetscMPIInt factstart,fact;
    new_npe = -9999; /* sentinel: no divisor found yet */
    factstart = nactive_procs;
    /* try to find a better number of procs: only divisors of nactive_procs are
       considered so the comm split below is even.
       NOTE(review): the loop has no break, so the last qualifying iteration
       (smallest 'fact', i.e. largest nactive_procs/fact) wins — presumably
       intentional (keep as many procs as the MIN_EQ_PROC bound allows); verify. */
    for(fact=factstart;fact>0;fact--){
      if( nactive_procs%fact==0 && neq/(nactive_procs/fact) > MIN_EQ_PROC ) {
        new_npe = nactive_procs/fact;
      }
    }
    if(new_npe == -9999) new_npe = nactive_procs;
  }

  if( PETSC_TRUE ) { /* partition: get 'isnewproc' (the 'else' branch below is currently dead) */
    MatPartitioning  mpart;
    Mat              adj;
    const PetscInt  *is_idx;
    PetscInt         is_sz,kk,jj,ii,old_fact=(npe/nactive_procs), *isnewproc_idx;
    /* create sub communicator of the ranks that will own coarse data */
    MPI_Comm         cm,new_comm;
    /* NOTE(review): 'counts[npe]' is a VLA sized by the communicator size —
       large npe risks stack overflow; consider PetscMalloc. */
    PetscInt         membershipKey = mype%old_fact, new_fact=(npe/new_npe),counts[npe];
    IS               isnewproc;

    *a_active_proc = new_npe; /* output for next level */
    ierr = MPI_Comm_split(wcomm, membershipKey, mype, &cm); CHKERRQ(ierr);
    ierr = PetscCommDuplicate( cm, &new_comm, PETSC_NULL ); CHKERRQ(ierr);
    ierr = MPI_Comm_free( &cm );                            CHKERRQ(ierr);

    /* MatPartitioningApply call MatConvert, which is collective */
    if( a_cbs==1) {
      ierr = MatConvert( Cmat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj );   CHKERRQ(ierr);
    }
    else {
      /* make a scalar (nodal) matrix to partition: collapse each a_cbs x a_cbs
         block of Cmat to a single entry */
      Mat tMat;
      PetscInt Ii,ncols; const PetscScalar *vals; const PetscInt *idx;
      MatInfo info;
      ierr = MatGetInfo(Cmat,MAT_LOCAL,&info); CHKERRQ(ierr);
      ncols = (PetscInt)info.nz_used/(ncrs0*a_cbs*a_cbs)+1; /* avg nodal row nnz estimate */

      ierr = MatCreateMPIAIJ( wcomm, ncrs0, ncrs0,
                              PETSC_DETERMINE, PETSC_DETERMINE,
                              2*ncols, PETSC_NULL, ncols, PETSC_NULL,
                              &tMat );
      CHKERRQ(ierr);

      for ( Ii = Istart0; Ii < Iend0; Ii++ ) {
        PetscInt dest_row = Ii/a_cbs;
        ierr = MatGetRow(Cmat,Ii,&ncols,&idx,&vals); CHKERRQ(ierr);
        for( jj = 0 ; jj < ncols ; jj++ ){
          PetscInt dest_col = idx[jj]/a_cbs;
          PetscScalar v = 1.0; /* only the connectivity matters for partitioning */
          ierr = MatSetValues(tMat,1,&dest_row,1,&dest_col,&v,ADD_VALUES); CHKERRQ(ierr);
        }
        ierr = MatRestoreRow(Cmat,Ii,&ncols,&idx,&vals); CHKERRQ(ierr);
      }
      ierr = MatAssemblyBegin(tMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
      ierr = MatAssemblyEnd(tMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

      ierr = MatConvert( tMat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj );   CHKERRQ(ierr);

      ierr = MatDestroy( &tMat );  CHKERRQ(ierr);
    }
    if( membershipKey == 0 ) { /* this rank participates in the partitioning */
      if(ncrs0==0)SETERRQ(wcomm,PETSC_ERR_LIB,"zero local nodes -- increase min");
      /* hack to fix global data that pmetis.c uses in 'adj': compress the
         ownership ranges to the participating ranks only */
      for( kk=0 , jj=0 ; kk<=npe ; jj++, kk += old_fact ) {
        adj->rmap->range[jj] = adj->rmap->range[kk];
      }
      ierr = MatPartitioningCreate( new_comm, &mpart ); CHKERRQ(ierr);
      ierr = MatPartitioningSetAdjacency( mpart, adj ); CHKERRQ(ierr);
      ierr = MatPartitioningSetFromOptions( mpart ); CHKERRQ(ierr);
      ierr = MatPartitioningSetNParts( mpart, new_npe ); CHKERRQ(ierr);
      ierr = MatPartitioningApply( mpart, &isnewproc ); CHKERRQ(ierr);
      ierr = MatPartitioningDestroy( &mpart ); CHKERRQ(ierr);
      /* collect IS info */
      ierr = ISGetLocalSize( isnewproc, &is_sz );        CHKERRQ(ierr);
      ierr = PetscMalloc( a_cbs*is_sz*sizeof(PetscInt), &isnewproc_idx ); CHKERRQ(ierr);
      ierr = ISGetIndices( isnewproc, &is_idx );     CHKERRQ(ierr);
      /* spread partitioning across machine - probably the right thing to do but machine spec. */
      for(kk=0,jj=0;kk<is_sz;kk++){
        for(ii=0 ; ii<a_cbs ; ii++, jj++ ) { /* expand for equation level by 'bs' */
          isnewproc_idx[jj] = is_idx[kk] * new_fact; /* map to global rank via stride */
        }
      }
      ierr = ISRestoreIndices( isnewproc, &is_idx );     CHKERRQ(ierr);
      ierr = ISDestroy( &isnewproc );                    CHKERRQ(ierr);
      is_sz *= a_cbs;
    }
    else { /* non-participating ranks contribute an empty IS */
      isnewproc_idx = 0;
      is_sz = 0;
    }
    ierr = MatDestroy( &adj );                       CHKERRQ(ierr);
    ierr = MPI_Comm_free( &new_comm );    CHKERRQ(ierr);
    /* NOTE(review): ierr from ISCreateGeneral is not CHKERRQ-checked here */
    ierr = ISCreateGeneral( wcomm, is_sz, isnewproc_idx, PETSC_COPY_VALUES, &isnewproc );
    if( membershipKey == 0 ) {
      ierr = PetscFree( isnewproc_idx );  CHKERRQ(ierr);
    }

    /*
     Create an index set from the isnewproc index set to indicate the mapping TO
     */
    ierr = ISPartitioningToNumbering( isnewproc, &isnum ); CHKERRQ(ierr);
    /*
     Determine how many elements are assigned to each processor
     */
    ierr = ISPartitioningCount( isnewproc, npe, counts ); CHKERRQ(ierr);
    ierr = ISDestroy( &isnewproc );                       CHKERRQ(ierr);
    ncrs_new = counts[mype]/a_cbs; /* new local coarse node count */

    { /* Create a vector to contain the newly ordered element information */
      const PetscInt *idx, data_sz=a_ndata_rows*a_ndata_cols;
      const PetscInt  stride0=ncrs0*a_ndata_rows,strideNew=ncrs_new*a_ndata_rows;
      PetscInt        ii,jj,kk;
      IS              isscat;
      PetscScalar    *array;
      Vec             src_crd, dest_crd;
      PetscReal      *data = *a_coarse_data;
      VecScatter      vecscat;
      /* NOTE(review): VLA sized by local node count times data size — consider
         PetscMalloc for robustness on large subdomains */
      PetscInt        tidx[ncrs0*data_sz];

      /* NOTE(review): ierr from VecCreate is not CHKERRQ-checked here */
      ierr = VecCreate( wcomm, &dest_crd );
      ierr = VecSetSizes( dest_crd, data_sz*ncrs_new, PETSC_DECIDE ); CHKERRQ(ierr);
      ierr = VecSetFromOptions( dest_crd ); CHKERRQ(ierr); /*funny vector-get global options?*/
      /*
       There are 'a_ndata_rows*a_ndata_cols' data items per node, (one can think of the vectors of having
       a block size of ...).  Note, ISs are expanded into equation space by 'a_cbs'.
       */
      ierr = ISGetIndices( isnum, &idx ); CHKERRQ(ierr);
      for(ii=0,jj=0; ii<ncrs0 ; ii++) {
        PetscInt id = idx[ii*a_cbs]/a_cbs; /* get node back from equation numbering */
        for( kk=0; kk<data_sz ; kk++, jj++) tidx[jj] = id*data_sz + kk;
      }
      ierr = ISRestoreIndices( isnum, &idx ); CHKERRQ(ierr);
      ierr = ISCreateGeneral( wcomm, data_sz*ncrs0, tidx, PETSC_COPY_VALUES, &isscat );
      CHKERRQ(ierr);
      /*
       Create a vector to contain the original vertex information for each element
       */
      ierr = VecCreateSeq( PETSC_COMM_SELF, data_sz*ncrs0, &src_crd ); CHKERRQ(ierr);
      for( jj=0; jj<a_ndata_cols ; jj++ ) {
        for( ii=0 ; ii<ncrs0 ; ii++) {
          for( kk=0; kk<a_ndata_rows ; kk++ ) {
            /* transpose from column-major storage to node-interleaved layout */
            PetscInt ix = ii*a_ndata_rows + kk + jj*stride0, jx = ii*data_sz + kk*a_ndata_cols + jj;
            ierr = VecSetValues( src_crd, 1, &jx, &data[ix], INSERT_VALUES );  CHKERRQ(ierr);
          }
        }
      }
      ierr = VecAssemblyBegin(src_crd); CHKERRQ(ierr);
      ierr = VecAssemblyEnd(src_crd); CHKERRQ(ierr);
      /*
       Scatter the element vertex information (still in the original vertex ordering)
       to the correct processor
       */
      ierr = VecScatterCreate( src_crd, PETSC_NULL, dest_crd, isscat, &vecscat);
      CHKERRQ(ierr);
      ierr = ISDestroy( &isscat );       CHKERRQ(ierr);
      ierr = VecScatterBegin(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
      ierr = VecScatterEnd(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
      ierr = VecScatterDestroy( &vecscat );       CHKERRQ(ierr);
      ierr = VecDestroy( &src_crd );       CHKERRQ(ierr);
      /*
       Put the element vertex data into a new allocation of the gdata->ele
       */
      ierr = PetscFree( *a_coarse_data );    CHKERRQ(ierr);
      ierr = PetscMalloc( data_sz*ncrs_new*sizeof(PetscReal), a_coarse_data );    CHKERRQ(ierr);

      ierr = VecGetArray( dest_crd, &array );    CHKERRQ(ierr);
      data = *a_coarse_data;
      for( jj=0; jj<a_ndata_cols ; jj++ ) {
        for( ii=0 ; ii<ncrs_new ; ii++) {
          for( kk=0; kk<a_ndata_rows ; kk++ ) {
            /* transpose back to column-major storage */
            PetscInt ix = ii*a_ndata_rows + kk + jj*strideNew, jx = ii*data_sz + kk*a_ndata_cols + jj;
            data[ix] = PetscRealPart(array[jx]);
            array[jx] = 1.e300; /* poison consumed slots (debug aid) */
          }
        }
      }
      ierr = VecRestoreArray( dest_crd, &array );    CHKERRQ(ierr);
      ierr = VecDestroy( &dest_crd );    CHKERRQ(ierr);
    }
    /*
     Invert for MatGetSubMatrix
     */
    ierr = ISInvertPermutation( isnum, ncrs_new*a_cbs, &new_indices ); CHKERRQ(ierr);
    ierr = ISSort( new_indices ); CHKERRQ(ierr); /* is this needed? */
    ierr = ISDestroy( &isnum ); CHKERRQ(ierr);
    /* A_crs output: extract the repartitioned coarse operator */
    ierr = MatGetSubMatrix( Cmat, new_indices, new_indices, MAT_INITIAL_MATRIX, a_Amat_crs );
    CHKERRQ(ierr);

    ierr = MatDestroy( &Cmat ); CHKERRQ(ierr);
    Cmat = *a_Amat_crs; /* output */
    ierr = MatSetBlockSize( Cmat, a_cbs );      CHKERRQ(ierr);

    /* prolongator: permute its columns to the new coarse ordering */
    ierr = MatGetOwnershipRange( Pold, &Istart, &Iend );    CHKERRQ(ierr);
    {
      IS findices;
      ierr = ISCreateStride(wcomm,Iend-Istart,Istart,1,&findices);   CHKERRQ(ierr);
      ierr = MatGetSubMatrix( Pold, findices, new_indices, MAT_INITIAL_MATRIX, &Pnew );
      CHKERRQ(ierr);
      ierr = ISDestroy( &findices ); CHKERRQ(ierr);
    }
    ierr = MatDestroy( a_P_inout ); CHKERRQ(ierr);
    *a_P_inout = Pnew; /* output */
    ierr = ISDestroy( &new_indices ); CHKERRQ(ierr);
  }
  else { /* dead branch: would skip repartitioning entirely */
    *a_Amat_crs = Cmat; /* output */
  }

  PetscFunctionReturn(0);
}
397 
/* hard cap on the number of multigrid levels PCSetUp_GAMG will build */
#define GAMG_MAXLEVELS 30
#if defined(PETSC_USE_LOG)
PetscLogStage  gamg_stages[20]; /* optional per-level log stages (see PCSetUp_GAMG) */
#endif
402 /* -------------------------------------------------------------------------- */
403 /*
404    PCSetUp_GAMG - Prepares for the use of the GAMG preconditioner
405                     by setting data structures and options.
406 
407    Input Parameter:
408 .  pc - the preconditioner context
409 
410    Application Interface Routine: PCSetUp()
411 
412    Notes:
413    The interface routine PCSetUp() is not usually called directly by
414    the user, but instead is called by PCApply() if necessary.
415 */
#undef __FUNCT__
#define __FUNCT__ "PCSetUp_GAMG"
PetscErrorCode PCSetUp_GAMG( PC a_pc )
{
  PetscErrorCode  ierr;
  PC_MG           *mg = (PC_MG*)a_pc->data;
  PC_GAMG         *pc_gamg = (PC_GAMG*)mg->innerctx;
  Mat              Amat = a_pc->mat, Pmat = a_pc->pmat;
  PetscInt         fine_level, level, level1, M, N, bs, nloc, lidx, Istart, Iend;
  MPI_Comm         wcomm = ((PetscObject)a_pc)->comm;
  PetscMPIInt      mype,npe,nactivepe;
  PetscBool        isOK;
  Mat              Aarr[GAMG_MAXLEVELS], Parr[GAMG_MAXLEVELS]; /* Aarr[0] is the finest level */
  PetscReal       *coarse_data = 0, *data, emaxs[GAMG_MAXLEVELS];

  PetscFunctionBegin;
  if( a_pc->setupcalled ) {
    /* no state data in GAMG to destroy */
    ierr = PCReset_MG( a_pc ); CHKERRQ(ierr);
  }
  ierr = MPI_Comm_rank(wcomm,&mype);CHKERRQ(ierr);
  ierr = MPI_Comm_size(wcomm,&npe);CHKERRQ(ierr);
  /* GAMG requires input of fine-grid matrix. It determines nlevels. */
  ierr = MatGetBlockSize( Amat, &bs ); CHKERRQ(ierr);
  ierr = MatGetSize( Amat, &M, &N );CHKERRQ(ierr);
  ierr = MatGetOwnershipRange( Amat, &Istart, &Iend ); CHKERRQ(ierr);
  nloc = (Iend-Istart)/bs; assert((Iend-Istart)%bs == 0);

  /* create default (SA) vertex data if the user did not supply coordinates */
  if( pc_gamg->m_data == 0 && nloc > 0 ) {
    ierr  = PCSetCoordinates_GAMG( a_pc, -1, 0 );    CHKERRQ( ierr );
  }
  data = pc_gamg->m_data;

  /* Get A_i and R_i: build the level hierarchy until the grid is small enough */
PetscPrintf(PETSC_COMM_WORLD,"\t[%d]%s level %d N=%d, n data rows=%d, n data cols=%d, nloc=%d, np=%d\n",0,__FUNCT__,0,N,pc_gamg->m_data_rows,pc_gamg->m_data_cols,nloc,npe);
  for ( level=0, Aarr[0] = Pmat, nactivepe = npe; /* hard wired stopping logic */
        level < GAMG_MAXLEVELS-1 && (level==0 || M/bs>TOP_GRID_LIM) && (npe==1 || nactivepe>1);
        level++ ) {
    level1 = level + 1;
    ierr = PetscLogEventBegin(gamg_setup_stages[SET1],0,0,0,0);CHKERRQ(ierr);
    /* coarsen: produce P_{level1}, the coarse vertex data, and an eigen estimate */
    ierr = createProlongation(Aarr[level], data, pc_gamg->m_dim, pc_gamg->m_data_cols, pc_gamg->m_useSA,
                              &bs, &Parr[level1], &coarse_data, &isOK, &emaxs[level] );
    CHKERRQ(ierr);
    ierr = PetscFree( data ); CHKERRQ( ierr ); /* this level's data is consumed */
    ierr = PetscLogEventEnd(gamg_setup_stages[SET1],0,0,0,0);CHKERRQ(ierr);

    if(level==0) Aarr[0] = Amat; /* use Pmat for finest level setup, but use mat for solver */

    if( isOK ) {
      ierr = PetscLogEventBegin(gamg_setup_stages[SET2],0,0,0,0);CHKERRQ(ierr);
      /* RAP + repartition onto fewer processes as grids shrink */
      ierr = partitionLevel( Aarr[level], pc_gamg->m_useSA ? bs : 1, pc_gamg->m_data_cols, bs,
                             &Parr[level1], &coarse_data, &nactivepe, &Aarr[level1] );
      CHKERRQ(ierr);
      ierr = PetscLogEventEnd(gamg_setup_stages[SET2],0,0,0,0);CHKERRQ(ierr);
      ierr = MatGetSize( Aarr[level1], &M, &N );CHKERRQ(ierr);
PetscPrintf(PETSC_COMM_WORLD,"\t\t[%d]%s %d) N=%d, bs=%d, n data cols=%d, %d active pes\n",0,__FUNCT__,level1,N,bs,pc_gamg->m_data_cols,nactivepe);
      /* coarse grids with SA can have zero row/cols from singleton aggregates */
      /* aggregation method can probably guarantee this does not happen! - be safe for now */
      if( PETSC_TRUE ){
        Vec diag; PetscScalar *data_arr,v; PetscInt Istart,Iend,kk,nloc,id;
        ierr = MatGetOwnershipRange(Aarr[level1], &Istart, &Iend); CHKERRQ(ierr);
        nloc = Iend-Istart;
        ierr = MatGetVecs( Aarr[level1], &diag, 0 );    CHKERRQ(ierr);
        ierr = MatGetDiagonal( Aarr[level1], diag );    CHKERRQ(ierr);
        ierr = VecGetArray( diag, &data_arr );   CHKERRQ(ierr);
        for(kk=0;kk<nloc;kk++){
          if(data_arr[kk]==0.0) {
            /* patch zero diagonals with a small value so smoothers don't divide by zero */
            id = kk + Istart; v = 1.e-1;
            ierr = MatSetValues(Aarr[level1],1,&id,1,&id,&v,INSERT_VALUES);
            CHKERRQ(ierr);
            /* PetscPrintf(PETSC_COMM_SELF,"[%d]%s warning: added diag to zero (%d) on level %d \n",mype,__FUNCT__,id,level); */
          }
        }
        ierr = VecRestoreArray( diag, &data_arr ); CHKERRQ(ierr);
        ierr = VecDestroy( &diag );                CHKERRQ(ierr);
        ierr = MatAssemblyBegin(Aarr[level1],MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
        ierr = MatAssemblyEnd(Aarr[level1],MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
      }
    }
    else{ /* coarsening failed or bottomed out - stop here */
      break;
    }
    data = coarse_data; /* next iteration consumes the coarse data */
  }
  ierr = PetscFree( coarse_data ); CHKERRQ( ierr );
PetscPrintf(PETSC_COMM_WORLD,"\t[%d]%s %d levels\n",0,__FUNCT__,level + 1);
  pc_gamg->m_data = 0; /* destroyed coordinate data */
  pc_gamg->m_Nlevels = level + 1;
  fine_level = level;
  ierr = PCMGSetLevels(a_pc,pc_gamg->m_Nlevels,PETSC_NULL);CHKERRQ(ierr);

  /* set default smoothers: Chebyshev on every level except the coarsest.
     Note: PCMG level index 'lidx' counts from the coarsest (0) up, while
     Aarr 'level' counts from the finest (0) down. */
  for ( lidx=1, level = pc_gamg->m_Nlevels-2;
        lidx <= fine_level;
        lidx++, level--) {
    PetscReal emax, emin;
    KSP smoother; PC subpc;
    ierr = PCMGGetSmoother( a_pc, lidx, &smoother ); CHKERRQ(ierr);
    ierr = KSPSetType( smoother, KSPCHEBYCHEV );CHKERRQ(ierr);
    if( emaxs[level] > 0.0 ) emax = emaxs[level]; /* use estimate from createProlongation */
    else{ /* eigen estimate 'emax': run a few CG iterations and read off singular values */
      KSP eksp; Mat Lmat = Aarr[level];
      Vec bb, xx; PC pc;

      ierr = MatGetVecs( Lmat, &bb, 0 );         CHKERRQ(ierr);
      ierr = MatGetVecs( Lmat, &xx, 0 );         CHKERRQ(ierr);
      {
	PetscRandom    rctx;
	ierr = PetscRandomCreate(wcomm,&rctx);CHKERRQ(ierr);
	ierr = PetscRandomSetFromOptions(rctx);CHKERRQ(ierr);
	ierr = VecSetRandom(bb,rctx);CHKERRQ(ierr);
	ierr = PetscRandomDestroy( &rctx ); CHKERRQ(ierr);
      }
      ierr = KSPCreate(wcomm,&eksp);CHKERRQ(ierr);
      ierr = KSPSetType( eksp, KSPCG );                      CHKERRQ(ierr);
      ierr = KSPSetInitialGuessNonzero( eksp, PETSC_FALSE ); CHKERRQ(ierr);
      ierr = KSPSetOperators( eksp, Lmat, Lmat, DIFFERENT_NONZERO_PATTERN ); CHKERRQ( ierr );
      ierr = KSPGetPC( eksp, &pc );CHKERRQ( ierr );
      ierr = PCSetType( pc, PETSC_GAMG_SMOOTHER ); CHKERRQ(ierr); /* should be same as above */
      ierr = KSPSetTolerances( eksp, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT, 10 );
      CHKERRQ(ierr);
      //ierr = KSPSetConvergenceTest( eksp, KSPSkipConverged, 0, 0 ); CHKERRQ(ierr);
      ierr = KSPSetNormType( eksp, KSP_NORM_NONE );                 CHKERRQ(ierr);

      ierr = KSPSetComputeSingularValues( eksp,PETSC_TRUE ); CHKERRQ(ierr);
      ierr = KSPSolve( eksp, bb, xx ); CHKERRQ(ierr);
      ierr = KSPComputeExtremeSingularValues( eksp, &emax, &emin ); CHKERRQ(ierr);
      ierr = VecDestroy( &xx );       CHKERRQ(ierr);
      ierr = VecDestroy( &bb );       CHKERRQ(ierr);
      ierr = KSPDestroy( &eksp );       CHKERRQ(ierr);
      PetscPrintf(PETSC_COMM_WORLD,"%s max eigen = %e min = %e\n",__FUNCT__,emax,emin);
    }
    {
      PetscInt N1, N0, tt;
      ierr = MatGetSize( Aarr[level], &N1, &tt );         CHKERRQ(ierr);
      ierr = MatGetSize( Aarr[level+1], &N0, &tt );       CHKERRQ(ierr);
      emin = .5*emax/((PetscReal)N1/(PetscReal)N0); /* this should be about the coarsening rate */
      emax *= 1.05; /* safety margin so Chebyshev interval covers the true spectrum */
    }

    /* NOTE(review): ierr from this KSPSetOperators is not CHKERRQ-checked */
    ierr = KSPSetOperators( smoother, Aarr[level], Aarr[level], DIFFERENT_NONZERO_PATTERN );
    ierr = KSPChebychevSetEigenvalues( smoother, emax, emin );CHKERRQ(ierr);
    ierr = KSPSetTolerances(smoother,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,2); CHKERRQ(ierr);
    ierr = KSPGetPC( smoother, &subpc ); CHKERRQ(ierr);
    ierr = PCSetType( subpc, PETSC_GAMG_SMOOTHER ); CHKERRQ(ierr);
    ierr = KSPSetNormType( smoother, KSP_NORM_NONE ); CHKERRQ(ierr);
  }
  {
    KSP smoother; /* coarse grid solver (PCMG level 0) */
    Mat Lmat = Aarr[pc_gamg->m_Nlevels-1];
    ierr = PCMGGetSmoother( a_pc, 0, &smoother ); CHKERRQ(ierr);
    ierr = KSPSetOperators( smoother, Lmat, Lmat, DIFFERENT_NONZERO_PATTERN );
    CHKERRQ(ierr);
    ierr = KSPSetNormType( smoother, KSP_NORM_NONE ); CHKERRQ(ierr);
  }
  /* should be called in PCSetFromOptions_GAMG(), but cannot be called prior to PCMGSetLevels() */
  ierr = PCSetFromOptions_MG(a_pc); CHKERRQ(ierr);
  {
    PetscBool galerkin;
    ierr = PCMGGetGalerkin( a_pc,  &galerkin); CHKERRQ(ierr);
    if(galerkin){
      SETERRQ(wcomm,PETSC_ERR_ARG_WRONG, "GAMG does galerkin manually so it must not be used in PC_MG.");
    }
  }

  /* set interpolation between the levels, create timer stages, clean up */
  if( PETSC_FALSE ) { /* disabled: per-level log stage registration */
    char str[32];
    sprintf(str,"MG Level %d (%d)",0,pc_gamg->m_Nlevels-1);
    PetscLogStageRegister(str, &gamg_stages[fine_level]);
  }
  for (lidx=0,level=pc_gamg->m_Nlevels-1;
       lidx<fine_level;
       lidx++, level--){
    ierr = PCMGSetInterpolation( a_pc, lidx+1, Parr[level] );CHKERRQ(ierr);
    if( !PETSC_TRUE ) { /* disabled: dump P and A matrices in MATLAB format for debugging */
      PetscViewer viewer; char fname[32];
      sprintf(fname,"Pmat_%d.m",level);
      ierr = PetscViewerASCIIOpen( wcomm, fname, &viewer );  CHKERRQ(ierr);
      ierr = PetscViewerSetFormat( viewer, PETSC_VIEWER_ASCII_MATLAB);  CHKERRQ(ierr);
      ierr = MatView( Parr[level], viewer ); CHKERRQ(ierr);
      ierr = PetscViewerDestroy( &viewer );
      sprintf(fname,"Amat_%d.m",level);
      ierr = PetscViewerASCIIOpen( wcomm, fname, &viewer );  CHKERRQ(ierr);
      ierr = PetscViewerSetFormat( viewer, PETSC_VIEWER_ASCII_MATLAB);  CHKERRQ(ierr);
      ierr = MatView( Aarr[level], viewer ); CHKERRQ(ierr);
      ierr = PetscViewerDestroy( &viewer );
    }
    /* PCMG holds its own references now; drop ours */
    ierr = MatDestroy( &Parr[level] );  CHKERRQ(ierr);
    ierr = MatDestroy( &Aarr[level] );  CHKERRQ(ierr);
    if( PETSC_FALSE ) { /* disabled: per-level log stage registration */
      char str[32];
      sprintf(str,"MG Level %d (%d)",lidx+1,level-1);
      PetscLogStageRegister(str, &gamg_stages[level-1]);
    }
  }

  /* setupcalled is set to 0 so that MG is setup from scratch */
  a_pc->setupcalled = 0;
  ierr = PCSetUp_MG(a_pc);CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
620 
621 /* ------------------------------------------------------------------------- */
622 /*
623    PCDestroy_GAMG - Destroys the private context for the GAMG preconditioner
624    that was created with PCCreate_GAMG().
625 
626    Input Parameter:
627 .  pc - the preconditioner context
628 
629    Application Interface Routine: PCDestroy()
630 */
631 #undef __FUNCT__
632 #define __FUNCT__ "PCDestroy_GAMG"
633 PetscErrorCode PCDestroy_GAMG(PC pc)
634 {
635   PetscErrorCode  ierr;
636   PC_MG           *mg = (PC_MG*)pc->data;
637   PC_GAMG         *pc_gamg= (PC_GAMG*)mg->innerctx;
638 
639   PetscFunctionBegin;
640   ierr = PCReset_GAMG(pc);CHKERRQ(ierr);
641   ierr = PetscFree(pc_gamg);CHKERRQ(ierr);
642   ierr = PCDestroy_MG(pc);CHKERRQ(ierr);
643   PetscFunctionReturn(0);
644 }
645 
646 #undef __FUNCT__
647 #define __FUNCT__ "PCSetFromOptions_GAMG"
648 PetscErrorCode PCSetFromOptions_GAMG(PC pc)
649 {
650   /* PetscErrorCode  ierr; */
651   /* PC_MG           *mg = (PC_MG*)pc->data; */
652   /* PC_GAMG         *pc_gamg = (PC_GAMG*)mg->innerctx; */
653   /* MPI_Comm        comm = ((PetscObject)pc)->comm; */
654 
655   PetscFunctionBegin;
656   PetscFunctionReturn(0);
657 }
658 
659 /* -------------------------------------------------------------------------- */
660 /*
661  PCCreate_GAMG - Creates a GAMG preconditioner context, PC_GAMG
662 
663    Input Parameter:
664 .  pc - the preconditioner context
665 
666    Application Interface Routine: PCCreate()
667 
668   */
669  /* MC
670      PCGAMG - Use algebraic multigrid preconditioning. This preconditioner requires you provide
671        fine grid discretization matrix and coordinates on the fine grid.
672 
673    Options Database Key:
674    Multigrid options(inherited)
675 +  -pc_mg_cycles <1>: 1 for V cycle, 2 for W-cycle (MGSetCycles)
676 .  -pc_mg_smoothup <1>: Number of post-smoothing steps (MGSetNumberSmoothUp)
677 .  -pc_mg_smoothdown <1>: Number of pre-smoothing steps (MGSetNumberSmoothDown)
678    -pc_mg_type <multiplicative>: (one of) additive multiplicative full cascade kascade
679    GAMG options:
680 
681    Level: intermediate
682   Concepts: multigrid
683 
684 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC, PCMGType,
           PCMGSetLevels(), PCMGGetLevels(), PCMGSetType(), PCMGSetCycles(), PCMGSetNumberSmoothDown(),
686            PCMGSetNumberSmoothUp(), PCMGGetCoarseSolve(), PCMGSetResidual(), PCMGSetInterpolation(),
687            PCMGSetRestriction(), PCMGGetSmoother(), PCMGGetSmootherUp(), PCMGGetSmootherDown(),
688            PCMGSetCyclesOnLevel(), PCMGSetRhs(), PCMGSetX(), PCMGSetR()
689 M */
690 
691 EXTERN_C_BEGIN
692 #undef __FUNCT__
693 #define __FUNCT__ "PCCreate_GAMG"
694 PetscErrorCode  PCCreate_GAMG(PC pc)
695 {
696   PetscErrorCode  ierr;
697   PC_GAMG         *pc_gamg;
698   PC_MG           *mg;
699   PetscClassId     cookie;
700 
701   PetscFunctionBegin;
702   /* PCGAMG is an inherited class of PCMG. Initialize pc as PCMG */
703   ierr = PCSetType(pc,PCMG);CHKERRQ(ierr); /* calls PCCreate_MG() and MGCreate_Private() */
704   ierr = PetscObjectChangeTypeName((PetscObject)pc,PCGAMG);CHKERRQ(ierr);
705 
706   /* create a supporting struct and attach it to pc */
707   ierr = PetscNewLog(pc,PC_GAMG,&pc_gamg);CHKERRQ(ierr);
708   mg = (PC_MG*)pc->data;
709   mg->innerctx = pc_gamg;
710 
711   pc_gamg->m_Nlevels    = -1;
712 
713   /* overwrite the pointers of PCMG by the functions of PCGAMG */
714   pc->ops->setfromoptions = PCSetFromOptions_GAMG;
715   pc->ops->setup          = PCSetUp_GAMG;
716   pc->ops->reset          = PCReset_GAMG;
717   pc->ops->destroy        = PCDestroy_GAMG;
718 
719   ierr = PetscObjectComposeFunctionDynamic( (PetscObject)pc,
720 					    "PCSetCoordinates_C",
721 					    "PCSetCoordinates_GAMG",
722 					    PCSetCoordinates_GAMG);CHKERRQ(ierr);
723 
724   PetscClassIdRegister("GAMG Setup",&cookie);
725   PetscLogEventRegister("GAMG-createProl", cookie, &gamg_setup_stages[SET1]);
726   PetscLogEventRegister("GAMG-partLevel", cookie, &gamg_setup_stages[SET2]);
727   PetscLogEventRegister("GAMG-MIS Graph", cookie, &gamg_setup_stages[SET3]);
728   PetscLogEventRegister("GAMG-MIS-Agg", cookie, &gamg_setup_stages[SET4]);
729   PetscLogEventRegister("GAMG-growSupp", cookie, &gamg_setup_stages[SET5]);
730   PetscLogEventRegister("GAMG-tri-Prol", cookie, &gamg_setup_stages[SET6]);
731   PetscLogEventRegister("GAMG-sa-init", cookie, &gamg_setup_stages[SET7]);
732   PetscLogEventRegister("GAMG-sa-frmProl0", cookie, &gamg_setup_stages[SET8]);
733   PetscLogEventRegister("GAMG-sa-smooth", cookie, &gamg_setup_stages[SET9]);
734 
735   PetscFunctionReturn(0);
736 }
737 EXTERN_C_END
738