/*
 GAMG geometric-algebraic multigrid PC - Mark Adams 2011
 */
#include <../src/ksp/pc/impls/gamg/gamg.h>           /*I "petscpc.h" I*/
#include "private/matimpl.h"

#if defined PETSC_USE_LOG
PetscLogEvent gamg_setup_events[NUM_SET];
#endif
#define GAMG_MAXLEVELS 30

/*#define GAMG_STAGES*/
#if (defined PETSC_USE_LOG && defined GAMG_STAGES)
static PetscLogStage gamg_stages[GAMG_MAXLEVELS];
#endif

/* Private context for the GAMG preconditioner */
static PetscBool s_avoid_repart = PETSC_FALSE;
static PetscInt s_min_eq_proc = 600;
static PetscReal s_threshold = 0.05;
typedef struct gamg_TAG {
  PetscInt       m_dim;
  PetscInt       m_Nlevels;
  PetscInt       m_data_sz;
  PetscInt       m_data_rows;
  PetscInt       m_data_cols;
  PetscInt       m_count;
  PetscInt       m_method; /* 0: geometric 'geo'; 1: plain agg 'pa'; 2: smoothed agg 'sa' */
  PetscReal     *m_data; /* blocked vector of vertex data on fine grid (coordinates) */
  char           m_type[64];
} PC_GAMG;
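
/*
   Layout note (added for clarity): m_data is stored column-major. With the
   geometric method it holds nloc rows by m_dim columns of coordinates; with
   aggregation it holds nloc*m_data_rows rows by m_data_cols columns of
   near-null-space vectors. For 3D elasticity, for example, m_data_rows = bs = 3
   and m_data_cols = 6 (three translations plus three rotations), as set up in
   PCSetCoordinates_GAMG() below.
*/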

/* -------------------------------------------------------------------------- */
/*
   PCSetCoordinates_GAMG

   Input Parameters:
   . a_pc - the preconditioner context
   . a_ndm - dimension of the coordinates (1, 2 or 3)
   . a_coords - [nloc*a_ndm] array of local nodal coordinates, or PETSC_NULL
*/
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCSetCoordinates_GAMG"
PetscErrorCode PCSetCoordinates_GAMG( PC a_pc, PetscInt a_ndm, PetscReal *a_coords )
{
  PC_MG          *mg = (PC_MG*)a_pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
  PetscErrorCode ierr;
  PetscInt       arrsz,bs,my0,kk,ii,jj,nloc,Iend;
  Mat            Amat = a_pc->pmat;

  PetscFunctionBegin;
  PetscValidHeaderSpecific( Amat, MAT_CLASSID, 1 );
  ierr  = MatGetBlockSize( Amat, &bs );               CHKERRQ( ierr );
  ierr  = MatGetOwnershipRange( Amat, &my0, &Iend ); CHKERRQ(ierr);
  nloc = (Iend-my0)/bs;
  if((Iend-my0)%bs!=0) SETERRQ1(((PetscObject)Amat)->comm,PETSC_ERR_ARG_WRONG, "Bad local size %d.",nloc);

  pc_gamg->m_data_rows = 1;
  if(a_coords==0 && pc_gamg->m_method==0) pc_gamg->m_method = 2; /* use SA if no coords */
  if( pc_gamg->m_method==0 ) pc_gamg->m_data_cols = a_ndm; /* coordinates */
  else{ /* SA: null space vectors */
    if(a_coords != 0 && bs==1 ) pc_gamg->m_data_cols = 1; /* scalar w/ coords and SA (not needed) */
    else if(a_coords != 0 ) pc_gamg->m_data_cols = (a_ndm==2 ? 3 : 6); /* elasticity */
    else pc_gamg->m_data_cols = bs; /* no data, force SA with constant null space vectors */
    pc_gamg->m_data_rows = bs;
  }
  arrsz = nloc*pc_gamg->m_data_rows*pc_gamg->m_data_cols;

  /* (re)allocate the data array; one extra entry holds a sentinel for a bounds check */
  if (pc_gamg->m_data==0 || (pc_gamg->m_data_sz != arrsz)) {
    ierr = PetscFree( pc_gamg->m_data );  CHKERRQ(ierr);
    ierr = PetscMalloc((arrsz+1)*sizeof(double), &pc_gamg->m_data ); CHKERRQ(ierr);
  }
  for(kk=0;kk<arrsz;kk++)pc_gamg->m_data[kk] = -999.;
  pc_gamg->m_data[arrsz] = -99.; /* sentinel, checked by the assert below */
  /* copy data in - column oriented */
  if( pc_gamg->m_method != 0 ) {
    const PetscInt M = Iend - my0; /* column (leading) dimension of the local data block */
    for(kk=0;kk<nloc;kk++){
      PetscReal *data = &pc_gamg->m_data[kk*bs];
      if( pc_gamg->m_data_cols==1 ) *data = 1.0; /* constant null-space vector */
      else {
        /* translational rigid body modes: identity in each block */
        for(ii=0;ii<bs;ii++)
          for(jj=0;jj<bs;jj++)
            if(ii==jj)data[ii*M + jj] = 1.0; /* translational modes */
            else data[ii*M + jj] = 0.0;
        if( a_coords != 0 ) {
          if( a_ndm == 2 ){ /* rotational mode: (-y, x) */
            data += 2*M;
            data[0] = -a_coords[2*kk+1];
            data[1] =  a_coords[2*kk];
          }
          else { /* three 3D rotational modes: cross products with the unit axes */
            data += 3*M;
            data[0] = 0.0;               data[M+0] =  a_coords[3*kk+2]; data[2*M+0] = -a_coords[3*kk+1];
            data[1] = -a_coords[3*kk+2]; data[M+1] = 0.0;               data[2*M+1] =  a_coords[3*kk];
            data[2] =  a_coords[3*kk+1]; data[M+2] = -a_coords[3*kk];   data[2*M+2] = 0.0;
          }
        }
      }
    }
  }
  else {
    for( kk = 0 ; kk < nloc ; kk++ ){
      for( ii = 0 ; ii < a_ndm ; ii++ ) {
        pc_gamg->m_data[ii*nloc + kk] =  a_coords[kk*a_ndm + ii];
      }
    }
  }
  assert(pc_gamg->m_data[arrsz] == -99.);

  pc_gamg->m_data_sz = arrsz;
  pc_gamg->m_dim = a_ndm;

  PetscFunctionReturn(0);
}
EXTERN_C_END
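
/*
   A minimal usage sketch (added for clarity; the variable names A, b, x,
   coords and nlocal are hypothetical): applications normally reach
   PCSetCoordinates_GAMG() through the generic PCSetCoordinates() wrapper
   that is composed onto the PC in PCCreate_GAMG() below.

     KSP ksp;  PC pc;
     ierr = KSPCreate(PETSC_COMM_WORLD,&ksp);CHKERRQ(ierr);
     ierr = KSPSetOperators(ksp,A,A,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
     ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
     ierr = PCSetType(pc,PCGAMG);CHKERRQ(ierr);
     ierr = PCSetCoordinates(pc,3,coords);CHKERRQ(ierr);  .. 3D, one point per block row
     ierr = KSPSetFromOptions(ksp);CHKERRQ(ierr);
     ierr = KSPSolve(ksp,b,x);CHKERRQ(ierr);
*/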


/* ----------------------------------------------------------------------------- */
#undef __FUNCT__
#define __FUNCT__ "PCReset_GAMG"
PetscErrorCode PCReset_GAMG(PC pc)
{
  PetscErrorCode  ierr;
  PC_MG           *mg = (PC_MG*)pc->data;
  PC_GAMG         *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  if( pc_gamg->m_data != 0 ) { /* this should not happen, cleaned up in SetUp */
    ierr = PetscFree(pc_gamg->m_data); CHKERRQ(ierr);
  }
  pc_gamg->m_data = 0; pc_gamg->m_data_sz = 0;
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------- */
/*
   partitionLevel

   Input Parameters:
   . a_Amat_fine - matrix on this fine (k) level
   . a_ndata_rows - size of data to move (coarse grid)
   . a_ndata_cols - size of data to move (coarse grid)
   . a_cbs - block size of the coarse grid
   In/Output Parameters:
   . a_P_inout - prolongation operator to the next level (k-1)
   . a_coarse_data - data that needs to be moved
   . a_nactive_proc - number of active procs
   Output Parameter:
   . a_Amat_crs - coarse matrix that is created (k-1)
*/
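
/*
   Overview of the steps below (summary added for readability): form the coarse
   operator with a Galerkin triple product (MatPtAP), pick a reduced number of
   active processes based on s_min_eq_proc, partition the coarse graph with
   MatPartitioning on a subcommunicator of the active processes, scatter the
   vertex data (coordinates or null-space vectors) to the new owners, and
   finally permute the coarse matrix and the prolongator with MatGetSubMatrix().
*/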

#define TOP_GRID_LIM 2*s_min_eq_proc /* grids at or below this size get reduced to one process anyway */

#undef __FUNCT__
#define __FUNCT__ "partitionLevel"
PetscErrorCode partitionLevel( Mat a_Amat_fine,
                               PetscInt a_ndata_rows,
                               PetscInt a_ndata_cols,
                               PetscInt a_cbs,
                               Mat *a_P_inout,
                               PetscReal **a_coarse_data,
                               PetscMPIInt *a_nactive_proc,
                               Mat *a_Amat_crs
                               )
{
  PetscErrorCode   ierr;
  Mat              Cmat,Pnew,Pold=*a_P_inout;
  IS               new_indices,isnum;
  MPI_Comm         wcomm = ((PetscObject)a_Amat_fine)->comm;
  PetscMPIInt      mype,npe,new_npe,nactive;
  PetscInt         neq,NN,Istart,Iend,Istart0,Iend0,ncrs_new,ncrs0;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank( wcomm, &mype ); CHKERRQ(ierr);
  ierr = MPI_Comm_size( wcomm, &npe );  CHKERRQ(ierr);
  /* RAP */
#ifdef USE_R
  /* make R with brute force for now */
  ierr = MatTranspose( Pold, MAT_INITIAL_MATRIX, &Pnew ); CHKERRQ(ierr);
  ierr = MatDestroy( &Pold );  CHKERRQ(ierr);
  ierr = MatRARt( a_Amat_fine, Pnew, MAT_INITIAL_MATRIX, 2.0, &Cmat ); CHKERRQ(ierr);
  Pold = Pnew;
#else
  ierr = MatPtAP( a_Amat_fine, Pold, MAT_INITIAL_MATRIX, 2.0, &Cmat ); CHKERRQ(ierr);
#endif
  ierr = MatSetBlockSize( Cmat, a_cbs );      CHKERRQ(ierr);
  ierr = MatGetOwnershipRange( Cmat, &Istart0, &Iend0 ); CHKERRQ(ierr);
  ncrs0 = (Iend0-Istart0)/a_cbs; assert((Iend0-Istart0)%a_cbs == 0);

  if( s_avoid_repart ) {
    *a_Amat_crs = Cmat; /* output */
  }
  else {
    /* Repartition Cmat_{k} and move columns of P^{k}_{k-1} and coordinates accordingly */
    Mat              adj;
    const PetscInt *idx,data_sz=a_ndata_rows*a_ndata_cols;
    const PetscInt  stride0=ncrs0*a_ndata_rows;
    PetscInt        is_sz,*isnewproc_idx,ii,jj,kk,strideNew,*tidx;
    /* create sub communicator  */
    MPI_Comm        cm,new_comm;
    MPI_Group       wg, g2;
    PetscInt       *counts,inpe;
    PetscMPIInt    *ranks;
    IS              isscat;
    PetscScalar    *array;
    Vec             src_crd, dest_crd;
    PetscReal      *data = *a_coarse_data;
    VecScatter      vecscat;
    IS              isnewproc;

    /* get number of PEs to make active, reduce */
    ierr = MatGetSize( Cmat, &neq, &NN );  CHKERRQ(ierr);
    new_npe = neq/s_min_eq_proc; /* hardwire min. number of eq/proc */
    if( new_npe == 0 || neq < TOP_GRID_LIM ) new_npe = 1;
    else if (new_npe >= *a_nactive_proc ) new_npe = *a_nactive_proc; /* no change, rare */

    ierr = PetscMalloc( npe*sizeof(PetscMPIInt), &ranks ); CHKERRQ(ierr);
    ierr = PetscMalloc( npe*sizeof(PetscInt), &counts ); CHKERRQ(ierr);

    ierr = MPI_Allgather( &ncrs0, 1, MPIU_INT, counts, 1, MPIU_INT, wcomm ); CHKERRQ(ierr);
    assert(counts[mype]==ncrs0);
    /* count the truly active PEs (those with local rows) */
    for( nactive = jj = 0 ; jj < npe ; jj++) {
      if( counts[jj] != 0 ) {
        ranks[nactive++] = jj;
      }
    }

    if (nactive < new_npe) new_npe = nactive; /* this can happen with empty input procs */

#ifdef VERBOSE
    PetscPrintf(PETSC_COMM_WORLD,"\t[%d]%s npe (active): %d --> %d. new npe = %d, neq = %d\n",mype,__FUNCT__,*a_nactive_proc,nactive,new_npe,neq);
#endif

    *a_nactive_proc = new_npe; /* output */

    ierr = MPI_Comm_group( wcomm, &wg ); CHKERRQ(ierr);
    ierr = MPI_Group_incl( wg, nactive, ranks, &g2 ); CHKERRQ(ierr);
    ierr = MPI_Comm_create( wcomm, g2, &cm ); CHKERRQ(ierr);

    if( cm != MPI_COMM_NULL ) {
      assert(ncrs0 != 0);
      ierr = PetscCommDuplicate( cm, &new_comm, PETSC_NULL ); CHKERRQ(ierr);
      ierr = MPI_Comm_free( &cm );                             CHKERRQ(ierr);
    }
    else assert(ncrs0 == 0);

    ierr = MPI_Group_free( &wg );                            CHKERRQ(ierr);
    ierr = MPI_Group_free( &g2 );                            CHKERRQ(ierr);

    /* MatPartitioningApply calls MatConvert, which is collective */
#if defined PETSC_USE_LOG
    ierr = PetscLogEventBegin(gamg_setup_events[SET12],0,0,0,0);CHKERRQ(ierr);
#endif
    if( a_cbs == 1) {
      ierr = MatConvert( Cmat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj );   CHKERRQ(ierr);
    }
    else{
      /* make a scalar (one vertex per block) matrix to partition */
      Mat tMat;
      PetscInt ncols,jj,Ii;
      const PetscScalar *vals;
      const PetscInt *idx;
      PetscInt *d_nnz;
      static int llev = 0;

      ierr = PetscMalloc( ncrs0*sizeof(PetscInt), &d_nnz ); CHKERRQ(ierr);
      for ( Ii = Istart0, jj = 0 ; Ii < Iend0 ; Ii += a_cbs, jj++ ) {
        ierr = MatGetRow(Cmat,Ii,&ncols,0,0); CHKERRQ(ierr);
        d_nnz[jj] = ncols/a_cbs;
        if( d_nnz[jj] > ncrs0 ) d_nnz[jj] = ncrs0;
        ierr = MatRestoreRow(Cmat,Ii,&ncols,0,0); CHKERRQ(ierr);
      }

      ierr = MatCreateMPIAIJ( wcomm, ncrs0, ncrs0,
                              PETSC_DETERMINE, PETSC_DETERMINE,
                              0, d_nnz, 0, d_nnz,
                              &tMat );
      CHKERRQ(ierr);
      ierr = PetscFree( d_nnz ); CHKERRQ(ierr);

      for ( ii = Istart0; ii < Iend0; ii++ ) {
        PetscInt dest_row = ii/a_cbs;
        ierr = MatGetRow(Cmat,ii,&ncols,&idx,&vals); CHKERRQ(ierr);
        for( jj = 0 ; jj < ncols ; jj++ ){
          PetscInt dest_col = idx[jj]/a_cbs;
          PetscScalar v = 1.0;
          ierr = MatSetValues(tMat,1,&dest_row,1,&dest_col,&v,ADD_VALUES); CHKERRQ(ierr);
        }
        ierr = MatRestoreRow(Cmat,ii,&ncols,&idx,&vals); CHKERRQ(ierr);
      }
      ierr = MatAssemblyBegin(tMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
      ierr = MatAssemblyEnd(tMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

      if( llev++ == -1 ) { /* debug dump of the partitioning matrix; dead by default */
        PetscViewer viewer; char fname[32];
        sprintf(fname,"part_mat_%d.mat",llev);
        ierr = PetscViewerBinaryOpen(wcomm,fname,FILE_MODE_WRITE,&viewer); CHKERRQ(ierr);
        ierr = MatView( tMat, viewer ); CHKERRQ(ierr);
        ierr = PetscViewerDestroy( &viewer ); CHKERRQ(ierr);
      }

      ierr = MatConvert( tMat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj );   CHKERRQ(ierr);

      ierr = MatDestroy( &tMat );  CHKERRQ(ierr);
    }
    if( ncrs0 != 0 ){
      const PetscInt *is_idx;
      MatPartitioning  mpart;
      /* hack to fix global data that pmetis.c uses in 'adj' */
      for( nactive = jj = 0 ; jj < npe ; jj++ ) {
        if( counts[jj] != 0 ) {
          adj->rmap->range[nactive++] = adj->rmap->range[jj];
        }
      }
      adj->rmap->range[nactive] = adj->rmap->range[npe];

      ierr = MatPartitioningCreate( new_comm, &mpart ); CHKERRQ(ierr);
      ierr = MatPartitioningSetAdjacency( mpart, adj ); CHKERRQ(ierr);
      ierr = MatPartitioningSetFromOptions( mpart );    CHKERRQ(ierr);
      ierr = MatPartitioningSetNParts( mpart, new_npe );CHKERRQ(ierr);
      ierr = MatPartitioningApply( mpart, &isnewproc ); CHKERRQ(ierr);
      ierr = MatPartitioningDestroy( &mpart );          CHKERRQ(ierr);

      /* collect IS info */
      ierr = ISGetLocalSize( isnewproc, &is_sz );       CHKERRQ(ierr);
      ierr = PetscMalloc( a_cbs*is_sz*sizeof(PetscInt), &isnewproc_idx ); CHKERRQ(ierr);
      ierr = ISGetIndices( isnewproc, &is_idx );        CHKERRQ(ierr);
      /* spread the partitioning across the machine - is this the best way? */
      NN = 1; /*npe/new_npe;*/
      for( kk = jj = 0 ; kk < is_sz ; kk++ ){
        for( ii = 0 ; ii < a_cbs ; ii++, jj++ ) {
          isnewproc_idx[jj] = is_idx[kk] * NN; /* distribution */
        }
      }
      ierr = ISRestoreIndices( isnewproc, &is_idx );     CHKERRQ(ierr);
      ierr = ISDestroy( &isnewproc );                    CHKERRQ(ierr);
      ierr = PetscCommDestroy( &new_comm );              CHKERRQ(ierr);

      is_sz *= a_cbs;
    }
    else{
      isnewproc_idx = 0;
      is_sz = 0;
    }
    ierr = MatDestroy( &adj );                       CHKERRQ(ierr);
    ierr = ISCreateGeneral( wcomm, is_sz, isnewproc_idx, PETSC_COPY_VALUES, &isnewproc ); CHKERRQ(ierr);
    if( isnewproc_idx != 0 ) {
      ierr = PetscFree( isnewproc_idx );  CHKERRQ(ierr);
    }

    /*
     Create an index set from the isnewproc index set to indicate the mapping TO
     */
    ierr = ISPartitioningToNumbering( isnewproc, &isnum ); CHKERRQ(ierr);
    /*
     Determine how many elements are assigned to each processor
     */
    inpe = npe;
    ierr = ISPartitioningCount( isnewproc, inpe, counts ); CHKERRQ(ierr);
    ierr = ISDestroy( &isnewproc );                       CHKERRQ(ierr);
    ncrs_new = counts[mype]/a_cbs;
    strideNew = ncrs_new*a_ndata_rows;
#if defined PETSC_USE_LOG
    ierr = PetscLogEventEnd(gamg_setup_events[SET12],0,0,0,0);   CHKERRQ(ierr);
#endif
    /* Create a vector to contain the newly ordered element information */
    ierr = VecCreate( wcomm, &dest_crd ); CHKERRQ(ierr);
    ierr = VecSetSizes( dest_crd, data_sz*ncrs_new, PETSC_DECIDE ); CHKERRQ(ierr);
    ierr = VecSetFromOptions( dest_crd ); CHKERRQ(ierr); /* this is needed! */
    /*
     There are 'a_ndata_rows*a_ndata_cols' data items per node; one can think of
     the vectors as having a block size of data_sz. Note, the ISs are expanded
     into equation space by 'a_cbs'.
     */
    ierr = PetscMalloc( (ncrs0*data_sz)*sizeof(PetscInt), &tidx ); CHKERRQ(ierr);
    ierr = ISGetIndices( isnum, &idx ); CHKERRQ(ierr);
    for(ii=0,jj=0; ii<ncrs0 ; ii++) {
      PetscInt id = idx[ii*a_cbs]/a_cbs; /* get node back */
      for( kk=0; kk<data_sz ; kk++, jj++) tidx[jj] = id*data_sz + kk;
    }
    ierr = ISRestoreIndices( isnum, &idx ); CHKERRQ(ierr);
    ierr = ISCreateGeneral( wcomm, data_sz*ncrs0, tidx, PETSC_COPY_VALUES, &isscat );
    CHKERRQ(ierr);
    ierr = PetscFree( tidx );  CHKERRQ(ierr);
    /*
     Create a vector to contain the original vertex information for each element
     */
    ierr = VecCreateSeq( PETSC_COMM_SELF, data_sz*ncrs0, &src_crd ); CHKERRQ(ierr);
    for( jj=0; jj<a_ndata_cols ; jj++ ) {
      for( ii=0 ; ii<ncrs0 ; ii++) {
        for( kk=0; kk<a_ndata_rows ; kk++ ) {
          PetscInt ix = ii*a_ndata_rows + kk + jj*stride0, jx = ii*data_sz + kk*a_ndata_cols + jj;
          PetscScalar tt = (PetscScalar)data[ix];
          ierr = VecSetValues( src_crd, 1, &jx, &tt, INSERT_VALUES );  CHKERRQ(ierr);
        }
      }
    }
    ierr = VecAssemblyBegin(src_crd); CHKERRQ(ierr);
    ierr = VecAssemblyEnd(src_crd); CHKERRQ(ierr);
    /*
      Scatter the element vertex information (still in the original vertex ordering)
      to the correct processor
    */
    ierr = VecScatterCreate( src_crd, PETSC_NULL, dest_crd, isscat, &vecscat);
    CHKERRQ(ierr);
    ierr = ISDestroy( &isscat );       CHKERRQ(ierr);
    ierr = VecScatterBegin(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterDestroy( &vecscat );       CHKERRQ(ierr);
    ierr = VecDestroy( &src_crd );       CHKERRQ(ierr);
    /*
      Put the scattered vertex data into a new allocation of a_coarse_data
    */
    ierr = PetscFree( *a_coarse_data );    CHKERRQ(ierr);
    ierr = PetscMalloc( data_sz*ncrs_new*sizeof(PetscReal), a_coarse_data );    CHKERRQ(ierr);

    ierr = VecGetArray( dest_crd, &array );    CHKERRQ(ierr);
    data = *a_coarse_data;
    for( jj=0; jj<a_ndata_cols ; jj++ ) {
      for( ii=0 ; ii<ncrs_new ; ii++) {
        for( kk=0; kk<a_ndata_rows ; kk++ ) {
          PetscInt ix = ii*a_ndata_rows + kk + jj*strideNew, jx = ii*data_sz + kk*a_ndata_cols + jj;
          data[ix] = PetscRealPart(array[jx]);
          array[jx] = 1.e300; /* poison the source slot to catch stale reads */
        }
      }
    }
    ierr = VecRestoreArray( dest_crd, &array );    CHKERRQ(ierr);
    ierr = VecDestroy( &dest_crd );    CHKERRQ(ierr);
    /*
      Invert for MatGetSubMatrix
    */
    ierr = ISInvertPermutation( isnum, ncrs_new*a_cbs, &new_indices ); CHKERRQ(ierr);
    ierr = ISSort( new_indices ); CHKERRQ(ierr); /* is this needed? */
    ierr = ISDestroy( &isnum ); CHKERRQ(ierr);
    /* A_crs output */
    ierr = MatGetSubMatrix( Cmat, new_indices, new_indices, MAT_INITIAL_MATRIX, a_Amat_crs );
    CHKERRQ(ierr);

    ierr = MatDestroy( &Cmat ); CHKERRQ(ierr);
    Cmat = *a_Amat_crs; /* output */
    ierr = MatSetBlockSize( Cmat, a_cbs );      CHKERRQ(ierr);

    /* prolongator */
    ierr = MatGetOwnershipRange( Pold, &Istart, &Iend );    CHKERRQ(ierr);
    {
      IS findices;
      ierr = ISCreateStride(wcomm,Iend-Istart,Istart,1,&findices);   CHKERRQ(ierr);
#ifdef USE_R
      ierr = MatGetSubMatrix( Pold, new_indices, findices, MAT_INITIAL_MATRIX, &Pnew );
#else
      ierr = MatGetSubMatrix( Pold, findices, new_indices, MAT_INITIAL_MATRIX, &Pnew );
#endif
      CHKERRQ(ierr);

      ierr = ISDestroy( &findices ); CHKERRQ(ierr);
    }

    ierr = MatDestroy( a_P_inout ); CHKERRQ(ierr);
    *a_P_inout = Pnew; /* output */

    ierr = ISDestroy( &new_indices ); CHKERRQ(ierr);
    ierr = PetscFree( counts );  CHKERRQ(ierr);
    ierr = PetscFree( ranks );  CHKERRQ(ierr);
  }

  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------- */
/*
   PCSetUp_GAMG - Prepares for the use of the GAMG preconditioner
                    by setting data structures and options.

   Input Parameter:
.  pc - the preconditioner context

   Application Interface Routine: PCSetUp()

   Notes:
   The interface routine PCSetUp() is not usually called directly by
   the user, but instead is called by PCApply() if necessary.
*/
#undef __FUNCT__
#define __FUNCT__ "PCSetUp_GAMG"
PetscErrorCode PCSetUp_GAMG( PC a_pc )
{
  PetscErrorCode  ierr;
  PC_MG           *mg = (PC_MG*)a_pc->data;
  PC_GAMG         *pc_gamg = (PC_GAMG*)mg->innerctx;
  PC_MG_Levels   **mglevels = mg->levels;
  Mat              Amat = a_pc->mat, Pmat = a_pc->pmat;
  PetscInt         fine_level, level, level1, M, N, bs, nloc, lidx, Istart, Iend;
  MPI_Comm         wcomm = ((PetscObject)a_pc)->comm;
  PetscMPIInt      mype,npe,nactivepe;
  PetscBool        isOK;
  Mat              Aarr[GAMG_MAXLEVELS], Parr[GAMG_MAXLEVELS];
  PetscReal       *coarse_data = 0, *data, emaxs[GAMG_MAXLEVELS];
  MatInfo          info;

  PetscFunctionBegin;
  pc_gamg->m_count++;
  if( a_pc->setupcalled > 0 ) {
    /* just redo the Galerkin grids */
    Mat B,dA,dB;

    /* PCSetUp_MG seems to insist on setting this to GMRES */
    ierr = KSPSetType( mglevels[0]->smoothd, KSPPREONLY ); CHKERRQ(ierr);

    /* currently only handle case where mat and pmat are the same on coarser levels */
    ierr = KSPGetOperators(mglevels[pc_gamg->m_Nlevels-1]->smoothd,&dA,&dB,PETSC_NULL);CHKERRQ(ierr);
    /* (re)set to get dirty flag */
    ierr = KSPSetOperators(mglevels[pc_gamg->m_Nlevels-1]->smoothd,dA,dB,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
    ierr = KSPSetUp( mglevels[pc_gamg->m_Nlevels-1]->smoothd ); CHKERRQ(ierr);

    for (level=pc_gamg->m_Nlevels-2; level>-1; level--) {
      ierr = KSPGetOperators(mglevels[level]->smoothd,PETSC_NULL,&B,PETSC_NULL);CHKERRQ(ierr);
      /* the first time through, the matrix structure has changed from repartitioning */
      if( pc_gamg->m_count == 2 ) {
        ierr = MatDestroy( &B );  CHKERRQ(ierr);
        ierr = MatPtAP(dB,mglevels[level+1]->interpolate,MAT_INITIAL_MATRIX,1.0,&B);CHKERRQ(ierr);
        mglevels[level]->A = B;
      }
      else {
        ierr = MatPtAP(dB,mglevels[level+1]->interpolate,MAT_REUSE_MATRIX,1.0,&B);CHKERRQ(ierr);
      }
      ierr = KSPSetOperators(mglevels[level]->smoothd,B,B,SAME_NONZERO_PATTERN); CHKERRQ(ierr);
      dB = B;
      /* setup KSP/PC */
      ierr = KSPSetUp( mglevels[level]->smoothd ); CHKERRQ(ierr);
    }

#define PRINT_MATS PETSC_FALSE
    /* plot levels - A */
    if( PRINT_MATS ) {
      for (lidx=0, level=pc_gamg->m_Nlevels-1; level>0 ; level--,lidx++){
        PetscViewer viewer;
        char fname[32]; KSP smoother; Mat Tmat, TTm;
        ierr = PCMGGetSmoother( a_pc, lidx, &smoother ); CHKERRQ(ierr);
        ierr = KSPGetOperators( smoother, &Tmat, &TTm, 0 ); CHKERRQ(ierr);
        sprintf(fname,"Amat_%d_%d.m",(int)pc_gamg->m_count,(int)level);
        ierr = PetscViewerASCIIOpen( wcomm, fname, &viewer );  CHKERRQ(ierr);
        ierr = PetscViewerSetFormat( viewer, PETSC_VIEWER_ASCII_MATLAB);  CHKERRQ(ierr);
        ierr = MatView( Tmat, viewer ); CHKERRQ(ierr);
        ierr = PetscViewerDestroy( &viewer ); CHKERRQ(ierr);
      }
    }

    a_pc->setupcalled = 2;

    PetscFunctionReturn(0);
  }
  ierr = MPI_Comm_rank(wcomm,&mype);CHKERRQ(ierr);
  ierr = MPI_Comm_size(wcomm,&npe);CHKERRQ(ierr);
  /* GAMG requires input of the fine-grid matrix. It determines nlevels. */
  ierr = MatGetBlockSize( Amat, &bs ); CHKERRQ(ierr);
  ierr = MatGetSize( Amat, &M, &N );CHKERRQ(ierr);
  ierr = MatGetOwnershipRange( Amat, &Istart, &Iend ); CHKERRQ(ierr);
  nloc = (Iend-Istart)/bs; assert((Iend-Istart)%bs == 0);

  /* create default vertex data if none was provided via PCSetCoordinates() */
  if( pc_gamg->m_data == 0 && nloc > 0 ) {
    ierr  = PCSetCoordinates_GAMG( a_pc, -1, 0 );    CHKERRQ( ierr );
  }
  data = pc_gamg->m_data;

  /* Get A_i and R_i */
  ierr = MatGetInfo(Amat,MAT_GLOBAL_SUM,&info); CHKERRQ(ierr);
#ifdef VERBOSE
  PetscPrintf(PETSC_COMM_WORLD,"\t[%d]%s level %d N=%d, n data rows=%d, n data cols=%d, nnz/row (ave)=%d, np=%d\n",
              mype,__FUNCT__,0,N,pc_gamg->m_data_rows,pc_gamg->m_data_cols,
              (int)(info.nz_used/(PetscReal)N),npe);
#endif
  for ( level=0, Aarr[0] = Pmat, nactivepe = npe; /* hard-wired stopping logic */
        level < (GAMG_MAXLEVELS-1) && (level==0 || M>TOP_GRID_LIM); /* && (npe==1 || nactivepe>1); */
        level++ ){
    level1 = level + 1;
#if (defined PETSC_USE_LOG && defined GAMG_STAGES)
    ierr = PetscLogStagePush(gamg_stages[level]); CHKERRQ( ierr );
#endif
#if defined PETSC_USE_LOG
    ierr = PetscLogEventBegin(gamg_setup_events[SET1],0,0,0,0);CHKERRQ(ierr);
#endif
    ierr = createProlongation(Aarr[level], data, pc_gamg->m_dim, pc_gamg->m_data_cols, pc_gamg->m_method,
                              level, s_threshold, &bs, &Parr[level1], &coarse_data, &isOK, &emaxs[level] );
    CHKERRQ(ierr);
    ierr = PetscFree( data ); CHKERRQ( ierr );
#if defined PETSC_USE_LOG
    ierr = PetscLogEventEnd(gamg_setup_events[SET1],0,0,0,0);CHKERRQ(ierr);
#endif
    if(level==0) Aarr[0] = Amat; /* use Pmat for finest level setup, but use mat for the solver */
    if( isOK ) {
#if defined PETSC_USE_LOG
      ierr = PetscLogEventBegin(gamg_setup_events[SET2],0,0,0,0);CHKERRQ(ierr);
#endif
      ierr = partitionLevel( Aarr[level], (pc_gamg->m_method != 0) ? bs : 1, pc_gamg->m_data_cols, bs,
                             &Parr[level1], &coarse_data, &nactivepe, &Aarr[level1] );
      CHKERRQ(ierr);
#if defined PETSC_USE_LOG
      ierr = PetscLogEventEnd(gamg_setup_events[SET2],0,0,0,0);CHKERRQ(ierr);
#endif
      ierr = MatGetSize( Aarr[level1], &M, &N );CHKERRQ(ierr);
      ierr = MatGetInfo(Aarr[level1],MAT_GLOBAL_SUM,&info); CHKERRQ(ierr);
#ifdef VERBOSE
      PetscPrintf(PETSC_COMM_WORLD,"\t\t[%d]%s %d) N=%d, n data cols=%d, nnz/row (ave)=%d, %d active pes\n",
                  mype,__FUNCT__,(int)level1,N,pc_gamg->m_data_cols,
                  (int)(info.nz_used/(PetscReal)N),nactivepe);
#endif
      /* coarse grids with SA can have zero row/cols from singleton aggregates */
      /* the aggregation method should guarantee this does not happen! */

#ifdef VERBOSE
      if( PETSC_TRUE ){
        Vec diag; PetscScalar *data_arr,v; PetscInt Istart,Iend,kk,nloceq,id;
        v = 1.e-10; /* LU factor has hard-wired numbers for small diags so this needs to match (yuk) */
        ierr = MatGetOwnershipRange(Aarr[level1], &Istart, &Iend); CHKERRQ(ierr);
        nloceq = Iend-Istart;
        ierr = MatGetVecs( Aarr[level1], &diag, 0 );    CHKERRQ(ierr);
        ierr = MatGetDiagonal( Aarr[level1], diag );    CHKERRQ(ierr);
        ierr = VecGetArray( diag, &data_arr );   CHKERRQ(ierr);
        for(kk=0;kk<nloceq;kk++){
          if(data_arr[kk]==0.0) {
            id = kk + Istart;
            ierr = MatSetValues(Aarr[level1],1,&id,1,&id,&v,INSERT_VALUES);
            CHKERRQ(ierr);
            PetscPrintf(PETSC_COMM_SELF,"\t[%d]%s warning: added zero to diag (%d) on level %d \n",mype,__FUNCT__,id,level1);
          }
        }
        ierr = VecRestoreArray( diag, &data_arr ); CHKERRQ(ierr);
        ierr = VecDestroy( &diag );                CHKERRQ(ierr);
        ierr = MatAssemblyBegin(Aarr[level1],MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
        ierr = MatAssemblyEnd(Aarr[level1],MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
      }
#endif
    }
    else{
      coarse_data = 0;
      break;
    }
    data = coarse_data;

#if (defined PETSC_USE_LOG && defined GAMG_STAGES)
    ierr = PetscLogStagePop(); CHKERRQ( ierr );
#endif
  }
  if( coarse_data ) {
    ierr = PetscFree( coarse_data ); CHKERRQ( ierr );
  }
#ifdef VERBOSE
  PetscPrintf(PETSC_COMM_WORLD,"\t[%d]%s %d levels\n",0,__FUNCT__,level + 1);
#endif
  pc_gamg->m_data = 0; /* the coordinate data was freed level-by-level above */
  pc_gamg->m_Nlevels = level + 1;
  fine_level = level;
  ierr = PCMGSetLevels(a_pc,pc_gamg->m_Nlevels,PETSC_NULL);CHKERRQ(ierr);

  /* set default smoothers */
  for ( lidx=1, level = pc_gamg->m_Nlevels-2;
        lidx <= fine_level;
        lidx++, level--) {
    PetscReal emax, emin;
    KSP smoother; PC subpc;
    ierr = PCMGGetSmoother( a_pc, lidx, &smoother ); CHKERRQ(ierr);
    ierr = KSPSetType( smoother, KSPCHEBYCHEV );CHKERRQ(ierr);
    if( emaxs[level] > 0.0 ) emax=emaxs[level];
    else{ /* eigen estimate 'emax' */
      KSP eksp; Mat Lmat = Aarr[level];
      Vec bb, xx; PC pc;

      ierr = MatGetVecs( Lmat, &bb, 0 );         CHKERRQ(ierr);
      ierr = MatGetVecs( Lmat, &xx, 0 );         CHKERRQ(ierr);
      {
        PetscRandom    rctx;
        ierr = PetscRandomCreate(wcomm,&rctx);CHKERRQ(ierr);
        ierr = PetscRandomSetFromOptions(rctx);CHKERRQ(ierr);
        ierr = VecSetRandom(bb,rctx);CHKERRQ(ierr);
        ierr = PetscRandomDestroy( &rctx ); CHKERRQ(ierr);
      }
      ierr = KSPCreate(wcomm,&eksp);CHKERRQ(ierr);
      ierr = KSPSetType( eksp, KSPCG );                      CHKERRQ(ierr);
      ierr = KSPSetInitialGuessNonzero( eksp, PETSC_FALSE ); CHKERRQ(ierr);
      ierr = KSPSetOperators( eksp, Lmat, Lmat, SAME_NONZERO_PATTERN ); CHKERRQ( ierr );
      ierr = KSPGetPC( eksp, &pc );CHKERRQ( ierr );
      ierr = PCSetType( pc, PETSC_GAMG_SMOOTHER ); CHKERRQ(ierr); /* should be the same PC as the eigen-estimate operator */
      ierr = KSPSetTolerances( eksp, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT, 10 );
      CHKERRQ(ierr);
      ierr = KSPSetNormType( eksp, KSP_NORM_NONE );                 CHKERRQ(ierr);

      ierr = KSPSetComputeSingularValues( eksp,PETSC_TRUE ); CHKERRQ(ierr);
      ierr = KSPSolve( eksp, bb, xx ); CHKERRQ(ierr);
      ierr = KSPComputeExtremeSingularValues( eksp, &emax, &emin ); CHKERRQ(ierr);
      ierr = VecDestroy( &xx );       CHKERRQ(ierr);
      ierr = VecDestroy( &bb );       CHKERRQ(ierr);
      ierr = KSPDestroy( &eksp );       CHKERRQ(ierr);
#ifdef VERBOSE
      PetscPrintf(PETSC_COMM_WORLD,"\t\t\t%s PC setup max eigen=%e min=%e PC=%s\n",__FUNCT__,emax,emin,PETSC_GAMG_SMOOTHER);
#endif
    }
    {
      PetscInt N1, N0, tt;
      ierr = MatGetSize( Aarr[level], &N1, &tt );         CHKERRQ(ierr);
      ierr = MatGetSize( Aarr[level+1], &N0, &tt );       CHKERRQ(ierr);
      emin = 1.*emax/((PetscReal)N1/(PetscReal)N0); /* this should be about the coarsening rate */
      emax *= 1.05;
    }
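    /*
      Rationale (comment added for clarity): Chebyshev smoothing needs a target
      interval [emin,emax]. emax comes from the eigen estimate above, inflated
      by 5% for safety, and emin is chosen as emax divided by the coarsening
      rate N1/N0, so the smoother concentrates on the high-frequency part of
      the spectrum that the coarse grid cannot represent.
    */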

    ierr = KSPSetOperators( smoother, Aarr[level], Aarr[level], SAME_NONZERO_PATTERN ); CHKERRQ(ierr);
    ierr = KSPChebychevSetEigenvalues( smoother, emax, emin );CHKERRQ(ierr);
    /* ierr = KSPSetTolerances(smoother,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,2); CHKERRQ(ierr); */
    ierr = KSPGetPC( smoother, &subpc ); CHKERRQ(ierr);
    ierr = PCSetType( subpc, PETSC_GAMG_SMOOTHER ); CHKERRQ(ierr);
    ierr = KSPSetNormType( smoother, KSP_NORM_NONE ); CHKERRQ(ierr);
  }
  {
    /* coarse grid */
    KSP smoother,*k2; PC subpc,pc2; PetscInt ii,first;
    Mat Lmat = Aarr[pc_gamg->m_Nlevels-1];
    ierr = PCMGGetSmoother( a_pc, 0, &smoother ); CHKERRQ(ierr);
    ierr = KSPSetOperators( smoother, Lmat, Lmat, SAME_NONZERO_PATTERN ); CHKERRQ(ierr);
    ierr = KSPSetNormType( smoother, KSP_NORM_NONE ); CHKERRQ(ierr);
    ierr = KSPGetPC( smoother, &subpc ); CHKERRQ(ierr);
    ierr = PCSetType( subpc, PCBJACOBI ); CHKERRQ(ierr);
    ierr = PCSetUp( subpc ); CHKERRQ(ierr);
    ierr = PCBJacobiGetSubKSP(subpc,&ii,&first,&k2);CHKERRQ(ierr);
    assert(ii==1);
    ierr = KSPGetPC(k2[0],&pc2);CHKERRQ(ierr);
    ierr = PCSetType( pc2, PCLU ); CHKERRQ(ierr);
  }

  /* should be called in PCSetFromOptions_GAMG(), but cannot be called prior to PCMGSetLevels() */
  ierr = PCSetFromOptions_MG(a_pc); CHKERRQ(ierr);
  {
    PetscBool galerkin;
    ierr = PCMGGetGalerkin( a_pc,  &galerkin); CHKERRQ(ierr);
    if(galerkin){
      SETERRQ(wcomm,PETSC_ERR_ARG_WRONG, "GAMG does Galerkin manually, so PCMGSetGalerkin() must not be used.");
    }
  }

  /* plot levels - R/P */
  if( PRINT_MATS ) {
    for (level=pc_gamg->m_Nlevels-1;level>0;level--){
      PetscViewer viewer;
      char fname[32];
      sprintf(fname,"Pmat_%d_%d.m",(int)pc_gamg->m_count,(int)level);
      ierr = PetscViewerASCIIOpen( wcomm, fname, &viewer );  CHKERRQ(ierr);
      ierr = PetscViewerSetFormat( viewer, PETSC_VIEWER_ASCII_MATLAB);  CHKERRQ(ierr);
      ierr = MatView( Parr[level], viewer ); CHKERRQ(ierr);
      ierr = PetscViewerDestroy( &viewer ); CHKERRQ(ierr);
      sprintf(fname,"Amat_%d_%d.m",(int)pc_gamg->m_count,(int)level);
      ierr = PetscViewerASCIIOpen( wcomm, fname, &viewer );  CHKERRQ(ierr);
      ierr = PetscViewerSetFormat( viewer, PETSC_VIEWER_ASCII_MATLAB);  CHKERRQ(ierr);
      ierr = MatView( Aarr[level], viewer ); CHKERRQ(ierr);
      ierr = PetscViewerDestroy( &viewer ); CHKERRQ(ierr);
    }
  }

  /* set interpolation between the levels, clean up */
  for (lidx=0,level=pc_gamg->m_Nlevels-1;
       lidx<fine_level;
       lidx++, level--){
    ierr = PCMGSetInterpolation( a_pc, lidx+1, Parr[level] );CHKERRQ(ierr);
    ierr = MatDestroy( &Parr[level] );  CHKERRQ(ierr);
    ierr = MatDestroy( &Aarr[level] );  CHKERRQ(ierr);
  }

  /* setupcalled is set to 0 so that MG is set up from scratch */
  a_pc->setupcalled = 0;
  ierr = PCSetUp_MG( a_pc );CHKERRQ( ierr );
  a_pc->setupcalled = 1; /* use 1 as a signal that this has not been re-setup */

  {
    KSP smoother;  /* PCSetUp_MG seems to insist on setting this to GMRES on the coarse grid */
    ierr = PCMGGetSmoother( a_pc, 0, &smoother ); CHKERRQ(ierr);
    ierr = KSPSetType( smoother, KSPPREONLY ); CHKERRQ(ierr);
    ierr = KSPSetUp( smoother ); CHKERRQ(ierr);
  }

  PetscFunctionReturn(0);
}
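
/*
   Note on the setup flow (added for clarity): the first call to PCSetUp_GAMG()
   builds the whole hierarchy (prolongators, repartitioning, smoothers); later
   calls take the a_pc->setupcalled > 0 branch above and only recompute the
   Galerkin coarse operators with MatPtAP(), reusing the existing interpolation.
*/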

/* ------------------------------------------------------------------------- */
/*
   PCDestroy_GAMG - Destroys the private context for the GAMG preconditioner
   that was created with PCCreate_GAMG().

   Input Parameter:
.  pc - the preconditioner context

   Application Interface Routine: PCDestroy()
*/
#undef __FUNCT__
#define __FUNCT__ "PCDestroy_GAMG"
PetscErrorCode PCDestroy_GAMG(PC pc)
{
  PetscErrorCode  ierr;
  PC_MG           *mg = (PC_MG*)pc->data;
  PC_GAMG         *pc_gamg= (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  ierr = PCReset_GAMG(pc);CHKERRQ(ierr);
  ierr = PetscFree(pc_gamg);CHKERRQ(ierr);
  ierr = PCDestroy_MG(pc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetProcEqLim"
/*@
   PCGAMGSetProcEqLim - Set the minimum number of equations to aim for on coarse
   grids via processor reduction.

   Not Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - the number of equations to aim for per process

   Options Database Key:
.  -pc_gamg_process_eq_limit <n>

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode  PCGAMGSetProcEqLim(PC pc, PetscInt n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetProcEqLim_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
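
/*
   A minimal usage sketch (the value 1000 is illustrative; pc is assumed to be
   a PCGAMG): the limit can be set from code before PCSetUp(), or equivalently
   from the command line.

     ierr = PCGAMGSetProcEqLim(pc,1000);CHKERRQ(ierr);   .. or: -pc_gamg_process_eq_limit 1000
*/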

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetProcEqLim_GAMG"
PetscErrorCode PCGAMGSetProcEqLim_GAMG(PC pc, PetscInt n)
{
  /* PC_MG           *mg = (PC_MG*)pc->data; */
  /* PC_GAMG         *pc_gamg = (PC_GAMG*)mg->innerctx; */

  PetscFunctionBegin;
  if(n>0) s_min_eq_proc = n;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCGAMGAvoidRepartitioning"
/*@
   PCGAMGAvoidRepartitioning - Do not repartition the coarse grids

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - PETSC_TRUE to avoid repartitioning

   Options Database Key:
.  -pc_gamg_avoid_repartitioning

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGAvoidRepartitioning(PC pc, PetscBool n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGAvoidRepartitioning_C",(PC,PetscBool),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
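
/*
   A minimal usage sketch (pc is assumed to be a PCGAMG): skipping the
   repartitioning in partitionLevel() makes setup cheaper at the cost of
   possibly idle or poorly balanced processes on coarse grids.

     ierr = PCGAMGAvoidRepartitioning(pc,PETSC_TRUE);CHKERRQ(ierr);   .. or: -pc_gamg_avoid_repartitioning
*/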

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCGAMGAvoidRepartitioning_GAMG"
PetscErrorCode PCGAMGAvoidRepartitioning_GAMG(PC pc, PetscBool n)
{
  /* PC_MG           *mg = (PC_MG*)pc->data; */
  /* PC_GAMG         *pc_gamg = (PC_GAMG*)mg->innerctx; */

  PetscFunctionBegin;
  s_avoid_repart = n;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetThreshold"
/*@
   PCGAMGSetThreshold - Relative threshold to use for dropping edges in the aggregation graph

   Not collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - the threshold value

   Options Database Key:
.  -pc_gamg_threshold <n>

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetThreshold(PC pc, PetscReal n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetThreshold_C",(PC,PetscReal),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
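
/*
   A minimal usage sketch (the value 0.05 mirrors the default s_threshold above;
   pc is assumed to be a PCGAMG): edges whose matrix entries are small relative
   to the diagonal are treated as weak and dropped when building the aggregation
   graph, so a larger threshold gives sparser graphs and faster coarsening.

     ierr = PCGAMGSetThreshold(pc,0.05);CHKERRQ(ierr);   .. or: -pc_gamg_threshold 0.05
*/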

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetThreshold_GAMG"
PetscErrorCode PCGAMGSetThreshold_GAMG(PC pc, PetscReal n)
{
  /* PC_MG           *mg = (PC_MG*)pc->data; */
  /* PC_GAMG         *pc_gamg = (PC_GAMG*)mg->innerctx; */

  PetscFunctionBegin;
  s_threshold = n;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetSolverType"
/*@
   PCGAMGSetSolverType - Set the coarsening method: 'geo', 'pa', or 'sa'.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  str - the method name string
-  sz - length of str

   Options Database Key:
.  -pc_gamg_type <geo,pa,sa>

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetSolverType(PC pc, char str[], PetscInt sz )
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetSolverType_C",(PC,char[],PetscInt),(pc,str,sz));
  CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
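
/*
   A minimal usage sketch (pc is assumed to be a PCGAMG): select smoothed
   aggregation from code or from the command line; the method is interpreted
   in PCSetFromOptions_GAMG() below.

     ierr = PCGAMGSetSolverType(pc,"sa",2);CHKERRQ(ierr);   .. or: -pc_gamg_type sa
*/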

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetSolverType_GAMG"
PetscErrorCode PCGAMGSetSolverType_GAMG(PC pc, char str[], PetscInt sz )
{
  PC_MG           *mg = (PC_MG*)pc->data;
  PC_GAMG         *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  if(sz < 64) strcpy(pc_gamg->m_type,str);
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCSetFromOptions_GAMG"
PetscErrorCode PCSetFromOptions_GAMG(PC pc)
{
  PetscErrorCode  ierr;
  PC_MG           *mg = (PC_MG*)pc->data;
  PC_GAMG         *pc_gamg = (PC_GAMG*)mg->innerctx;
  PetscBool flag;

  PetscFunctionBegin;
  ierr = PetscOptionsHead("GAMG options"); CHKERRQ(ierr);
  {
    ierr = PetscOptionsString("-pc_gamg_type",
                              "Solver type: plain aggregation ('pa'), smoothed aggregation ('sa') or geometric multigrid (default)",
                              "PCGAMGSetSolverType",
                              pc_gamg->m_type,
                              pc_gamg->m_type,
                              64,
                              &flag );
    CHKERRQ(ierr);
    if( flag && pc_gamg->m_data != 0 ) {
      if( (strcmp(pc_gamg->m_type,"sa")==0 && pc_gamg->m_method != 2) ||
          (strcmp(pc_gamg->m_type,"pa")==0 && pc_gamg->m_method != 1) ||
          (strcmp(pc_gamg->m_type,"geo")==0 && pc_gamg->m_method != 0) ) {
        SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONG, "PCSetFromOptions changed the method after PCSetCoordinates created the data");
      }
    }

    if (flag && strcmp(pc_gamg->m_type,"sa") == 0) pc_gamg->m_method = 2;
    else if (flag && strcmp(pc_gamg->m_type,"pa") == 0) pc_gamg->m_method = 1;
    else pc_gamg->m_method = 0;

    /* common (static) variable */
    ierr = PetscOptionsBool("-pc_gamg_avoid_repartitioning",
                            "Do not repartition coarse grids (false)",
                            "PCGAMGAvoidRepartitioning",
                            s_avoid_repart,
                            &s_avoid_repart,
                            &flag);
    CHKERRQ(ierr);

    /* common (static) variable */
    ierr = PetscOptionsInt("-pc_gamg_process_eq_limit",
                           "Limit (goal) on number of equations per process on coarse grids",
                           "PCGAMGSetProcEqLim",
                           s_min_eq_proc,
                           &s_min_eq_proc,
                           &flag );
    CHKERRQ(ierr);

    /* common (static) variable */
    ierr = PetscOptionsReal("-pc_gamg_threshold",
                            "Relative threshold to use for dropping edges in aggregation graph",
                            "PCGAMGSetThreshold",
                            s_threshold,
                            &s_threshold,
                            &flag );
    CHKERRQ(ierr);
  }
  ierr = PetscOptionsTail();CHKERRQ(ierr);

  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------- */
/*
 PCCreate_GAMG - Creates a GAMG preconditioner context, PC_GAMG

   Input Parameter:
.  pc - the preconditioner context

   Application Interface Routine: PCCreate()

  */
 /* MC
     PCGAMG - Use algebraic multigrid preconditioning. This preconditioner requires
       that you provide the fine-grid discretization matrix and, for the geometric
       method, the coordinates on the fine grid.

   Options Database Keys:
   Multigrid options (inherited):
+  -pc_mg_cycles <1>: 1 for V cycle, 2 for W-cycle (PCMGSetCycles)
.  -pc_mg_smoothup <1>: Number of post-smoothing steps (PCMGSetNumberSmoothUp)
.  -pc_mg_smoothdown <1>: Number of pre-smoothing steps (PCMGSetNumberSmoothDown)
-  -pc_mg_type <multiplicative>: (one of) additive multiplicative full cascade kascade
   GAMG options:
+  -pc_gamg_type <geo,pa,sa>: coarsening method (PCGAMGSetSolverType)
.  -pc_gamg_threshold <0.05>: relative drop tolerance in the aggregation graph (PCGAMGSetThreshold)
.  -pc_gamg_process_eq_limit <600>: goal for the number of equations per process on coarse grids (PCGAMGSetProcEqLim)
-  -pc_gamg_avoid_repartitioning: do not repartition coarse grids (PCGAMGAvoidRepartitioning)

   Level: intermediate
  Concepts: multigrid

.seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC, PCMGType,
           PCMGSetLevels(), PCMGGetLevels(), PCMGSetType(), PCMGSetCycles(), PCMGSetNumberSmoothDown(),
           PCMGSetNumberSmoothUp(), PCMGGetCoarseSolve(), PCMGSetResidual(), PCMGSetInterpolation(),
           PCMGSetRestriction(), PCMGGetSmoother(), PCMGGetSmootherUp(), PCMGGetSmootherDown(),
           PCMGSetCyclesOnLevel(), PCMGSetRhs(), PCMGSetX(), PCMGSetR()
M */
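
/*
   Example command-line usage (a sketch; the option values are illustrative and
   mirror the defaults defined at the top of this file):

     -pc_type gamg -pc_gamg_type sa -pc_gamg_threshold 0.05
     -pc_gamg_process_eq_limit 600 -pc_mg_cycles 1
*/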

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCCreate_GAMG"
PetscErrorCode  PCCreate_GAMG(PC pc)
{
  PetscErrorCode  ierr;
  PC_GAMG         *pc_gamg;
  PC_MG           *mg;
  PetscClassId     cookie;
#if defined PETSC_USE_LOG
  static int count = 0;
#endif

  PetscFunctionBegin;
  /* PCGAMG is an inherited class of PCMG. Initialize pc as PCMG */
  ierr = PCSetType(pc,PCMG);CHKERRQ(ierr); /* calls PCCreate_MG() and MGCreate_Private() */
  ierr = PetscObjectChangeTypeName((PetscObject)pc,PCGAMG);CHKERRQ(ierr);

  /* create a supporting struct and attach it to pc */
  ierr = PetscNewLog(pc,PC_GAMG,&pc_gamg);CHKERRQ(ierr);
  pc_gamg->m_data_sz = 0; pc_gamg->m_data = 0; pc_gamg->m_count = 0;
  mg = (PC_MG*)pc->data;
  mg->innerctx = pc_gamg;

  pc_gamg->m_Nlevels    = -1;

  /* overwrite the pointers of PCMG by the functions of PCGAMG */
  pc->ops->setfromoptions = PCSetFromOptions_GAMG;
  pc->ops->setup          = PCSetUp_GAMG;
  pc->ops->reset          = PCReset_GAMG;
  pc->ops->destroy        = PCDestroy_GAMG;

  ierr = PetscObjectComposeFunctionDynamic( (PetscObject)pc,
                                            "PCSetCoordinates_C",
                                            "PCSetCoordinates_GAMG",
                                            PCSetCoordinates_GAMG);
  CHKERRQ(ierr);

  ierr = PetscObjectComposeFunctionDynamic( (PetscObject)pc,
                                            "PCGAMGSetProcEqLim_C",
                                            "PCGAMGSetProcEqLim_GAMG",
                                            PCGAMGSetProcEqLim_GAMG);
  CHKERRQ(ierr);

  ierr = PetscObjectComposeFunctionDynamic( (PetscObject)pc,
                                            "PCGAMGAvoidRepartitioning_C",
                                            "PCGAMGAvoidRepartitioning_GAMG",
                                            PCGAMGAvoidRepartitioning_GAMG);
  CHKERRQ(ierr);

  ierr = PetscObjectComposeFunctionDynamic( (PetscObject)pc,
                                            "PCGAMGSetThreshold_C",
                                            "PCGAMGSetThreshold_GAMG",
                                            PCGAMGSetThreshold_GAMG);
  CHKERRQ(ierr);

  ierr = PetscObjectComposeFunctionDynamic( (PetscObject)pc,
                                            "PCGAMGSetSolverType_C",
                                            "PCGAMGSetSolverType_GAMG",
                                            PCGAMGSetSolverType_GAMG);
  CHKERRQ(ierr);

#if defined PETSC_USE_LOG
  if( count++ == 0 ) {
    PetscClassIdRegister("GAMG Setup",&cookie);
    PetscLogEventRegister("GAMG: createProl", cookie, &gamg_setup_events[SET1]);
    PetscLogEventRegister("  Graph", cookie, &gamg_setup_events[GRAPH]);
    PetscLogEventRegister("    G.Mat", cookie, &gamg_setup_events[GRAPH_MAT]);
    PetscLogEventRegister("    G.Filter", cookie, &gamg_setup_events[GRAPH_FILTER]);
    PetscLogEventRegister("    G.Square", cookie, &gamg_setup_events[GRAPH_SQR]);
    PetscLogEventRegister("  MIS/Agg", cookie, &gamg_setup_events[SET4]);
    PetscLogEventRegister("  geo: growSupp", cookie, &gamg_setup_events[SET5]);
    PetscLogEventRegister("  geo: triangle", cookie, &gamg_setup_events[SET6]);
    PetscLogEventRegister("    search&set", cookie, &gamg_setup_events[FIND_V]);
    PetscLogEventRegister("  SA: init", cookie, &gamg_setup_events[SET7]);
    /* PetscLogEventRegister("  SA: frmProl0", cookie, &gamg_setup_events[SET8]); */
    PetscLogEventRegister("  SA: smooth", cookie, &gamg_setup_events[SET9]);
    PetscLogEventRegister("GAMG: partLevel", cookie, &gamg_setup_events[SET2]);
    PetscLogEventRegister("  PL repartition", cookie, &gamg_setup_events[SET12]);
    /* PetscLogEventRegister(" PL move data", cookie, &gamg_setup_events[SET13]); */
    /* PetscLogEventRegister("GAMG: fix", cookie, &gamg_setup_events[SET10]); */
    /* PetscLogEventRegister("GAMG: set levels", cookie, &gamg_setup_events[SET11]); */

    /* create timer stages */
#if defined GAMG_STAGES
    {
      char str[32];
      PetscInt lidx;
      sprintf(str,"MG Level %d (finest)",0);
      PetscLogStageRegister(str, &gamg_stages[0]);
      for (lidx=1;lidx<GAMG_MAXLEVELS;lidx++){ /* register a stage for every possible level */
        sprintf(str,"MG Level %d",(int)lidx);
        PetscLogStageRegister(str, &gamg_stages[lidx]);
      }
    }
#endif
  }
#endif
  PetscFunctionReturn(0);
}
EXTERN_C_END