1 /*
2  GAMG geometric-algebraic multigrid PC - Mark Adams 2011
3  */
4 #include <../src/ksp/pc/impls/gamg/gamg.h>           /*I "petscpc.h" I*/
5 #include "private/matimpl.h"
6 
7 #if defined PETSC_USE_LOG
8 PetscLogEvent gamg_setup_events[NUM_SET];
9 #endif
10 #define GAMG_MAXLEVELS 30
11 
12 /*#define GAMG_STAGES*/
13 #if (defined PETSC_USE_LOG && defined GAMG_STAGES)
14 static PetscLogStage gamg_stages[GAMG_MAXLEVELS];
15 #endif
16 
17 /* Private context for the GAMG preconditioner */
18 static PetscBool s_avoid_repart = PETSC_FALSE;
19 static PetscInt s_min_eq_proc = 600;
20 static PetscReal s_threshold = 0.05;
21 typedef struct gamg_TAG {
22   PetscInt       m_dim;
23   PetscInt       m_Nlevels;
24   PetscInt       m_data_sz;
25   PetscInt       m_data_rows;
26   PetscInt       m_data_cols;
27   PetscInt       m_count;
28   PetscInt       m_method; /* 0: geometric 'geo'; 1: plain aggregation 'pa'; 2: smoothed aggregation 'sa' */
29   PetscReal     *m_data; /* blocked vector of vertex data on fine grid (coordinates) */
30   char           m_type[64];
31 } PC_GAMG;
32 
33 /* -------------------------------------------------------------------------- */
34 /*
35    PCSetCoordinates_GAMG
36 
37    Input Parameter:
38    .  pc - the preconditioner context
39 */
40 EXTERN_C_BEGIN
41 #undef __FUNCT__
42 #define __FUNCT__ "PCSetCoordinates_GAMG"
43 PetscErrorCode PCSetCoordinates_GAMG( PC a_pc, PetscInt a_ndm, PetscReal *a_coords )
44 {
45   PC_MG          *mg = (PC_MG*)a_pc->data;
46   PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
47   PetscErrorCode ierr;
48   PetscInt       arrsz,bs,my0,kk,ii,jj,nloc,Iend;
49   Mat            Amat = a_pc->pmat;
50 
51   PetscFunctionBegin;
52   PetscValidHeaderSpecific( Amat, MAT_CLASSID, 1 );
53   ierr  = MatGetBlockSize( Amat, &bs );               CHKERRQ( ierr );
54   ierr  = MatGetOwnershipRange( Amat, &my0, &Iend ); CHKERRQ(ierr);
55   nloc = (Iend-my0)/bs;
56   if((Iend-my0)%bs!=0) SETERRQ2(((PetscObject)Amat)->comm,PETSC_ERR_ARG_WRONG, "Local size %D not divisible by block size %D.",Iend-my0,bs);
57 
58   pc_gamg->m_data_rows = 1;
59   if(a_coords==0 && pc_gamg->m_method==0) pc_gamg->m_method = 2; /* use SA if no coords */
60   if( pc_gamg->m_method==0 ) pc_gamg->m_data_cols = a_ndm; /* coordinates */
61   else{ /* SA: null space vectors */
62     if(a_coords != 0 && bs==1 ) pc_gamg->m_data_cols = 1; /* scalar w/ coords and SA (not needed) */
63     else if(a_coords != 0 ) pc_gamg->m_data_cols = (a_ndm==2 ? 3 : 6); /* elasticity */
64     else pc_gamg->m_data_cols = bs; /* no data, force SA with constant null space vectors */
65     pc_gamg->m_data_rows = bs;
66   }
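  /* e.g., 3D elasticity (bs=3, a_ndm=3) gives six null-space vectors per node:
     three translations plus three rotations (built below) */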
67   arrsz = nloc*pc_gamg->m_data_rows*pc_gamg->m_data_cols;
68 
69   /* (re)allocate the data array; one extra slot holds a guard value checked below */
70   if (pc_gamg->m_data==0 || (pc_gamg->m_data_sz != arrsz)) {
71     ierr = PetscFree( pc_gamg->m_data );  CHKERRQ(ierr);
72     ierr = PetscMalloc((arrsz+1)*sizeof(PetscReal), &pc_gamg->m_data ); CHKERRQ(ierr);
73   }
74   for(kk=0;kk<arrsz;kk++)pc_gamg->m_data[kk] = -999.;
75   pc_gamg->m_data[arrsz] = -99.;
76   /* copy data in - column oriented */
77   if( pc_gamg->m_method != 0 ) {
78     const PetscInt M = Iend - my0;
79     for(kk=0;kk<nloc;kk++){
80       PetscReal *data = &pc_gamg->m_data[kk*bs];
81       if( pc_gamg->m_data_cols==1 ) *data = 1.0;
82       else {
83         for(ii=0;ii<bs;ii++)
84 	  for(jj=0;jj<bs;jj++)
85 	    if(ii==jj)data[ii*M + jj] = 1.0; /* translational modes */
86 	    else data[ii*M + jj] = 0.0;
87         if( a_coords != 0 ) {
88           if( a_ndm == 2 ){ /* rotational modes */
89             data += 2*M;
90             data[0] = -a_coords[2*kk+1];
91             data[1] =  a_coords[2*kk];
92           }
93           else {
94             data += 3*M;
95             data[0] = 0.0;               data[M+0] =  a_coords[3*kk+2]; data[2*M+0] = -a_coords[3*kk+1];
96             data[1] = -a_coords[3*kk+2]; data[M+1] = 0.0;               data[2*M+1] =  a_coords[3*kk];
97             data[2] =  a_coords[3*kk+1]; data[M+2] = -a_coords[3*kk];   data[2*M+2] = 0.0;
98           }
99         }
100       }
101     }
102   }
103   else {
104     for( kk = 0 ; kk < nloc ; kk++ ){
105       for( ii = 0 ; ii < a_ndm ; ii++ ) {
106         pc_gamg->m_data[ii*nloc + kk] =  a_coords[kk*a_ndm + ii];
107       }
108     }
109   }
110   assert(pc_gamg->m_data[arrsz] == -99.);
111 
112   pc_gamg->m_data_sz = arrsz;
113   pc_gamg->m_dim = a_ndm;
114 
115   PetscFunctionReturn(0);
116 }
117 EXTERN_C_END
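
/*
   Example usage (user code) - a minimal sketch, assuming a 2D problem with
   interlaced vertex coordinates in 'coords' and the two-argument coordinate
   interface of this revision:

     PC pc;
     ierr = KSPGetPC( ksp, &pc );               CHKERRQ(ierr);
     ierr = PCSetType( pc, PCGAMG );            CHKERRQ(ierr);
     ierr = PCSetCoordinates( pc, 2, coords );  CHKERRQ(ierr);
*/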
118 
119 
120 /* ----------------------------------------------------------------------------- */
121 #undef __FUNCT__
122 #define __FUNCT__ "PCReset_GAMG"
123 PetscErrorCode PCReset_GAMG(PC pc)
124 {
125   PetscErrorCode  ierr;
126   PC_MG           *mg = (PC_MG*)pc->data;
127   PC_GAMG         *pc_gamg = (PC_GAMG*)mg->innerctx;
128 
129   PetscFunctionBegin;
130   if( pc_gamg->m_data != 0 ) { /* this should not happen, cleaned up in SetUp */
131     ierr = PetscFree(pc_gamg->m_data); CHKERRQ(ierr);
132   }
133   pc_gamg->m_data = 0; pc_gamg->m_data_sz = 0;
134   PetscFunctionReturn(0);
135 }
136 
137 /* -------------------------------------------------------------------------- */
138 /*
139    partitionLevel
140 
141    Input Parameter:
142    . a_Amat_fine - matrix on this fine (k) level
143    . a_ndata_rows - size of data to move (coarse grid)
144    . a_ndata_cols - size of data to move (coarse grid)
145    In/Output Parameter:
146    . a_P_inout - prolongation operator to the next level (k-1)
147    . a_coarse_data - data that need to be moved
148    . a_nactive_proc - number of active procs
149    Output Parameter:
150    . a_Amat_crs - coarse matrix that is created (k-1)
151 */
152 
153 #define TOP_GRID_LIM (2*s_min_eq_proc) /* this will happen anyway */
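/* Worked example of the reduction logic below: with the default
   s_min_eq_proc=600, a coarse grid with neq=10000 equations targets
   new_npe = 10000/600 = 16 active processes; any grid smaller than
   TOP_GRID_LIM = 1200 equations is collapsed onto a single process. */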
154 
155 #undef __FUNCT__
156 #define __FUNCT__ "partitionLevel"
157 PetscErrorCode partitionLevel( Mat a_Amat_fine,
158                                PetscInt a_ndata_rows,
159                                PetscInt a_ndata_cols,
160 			       PetscInt a_cbs,
161                                Mat *a_P_inout,
162                                PetscReal **a_coarse_data,
163                                PetscMPIInt *a_nactive_proc,
164                                Mat *a_Amat_crs
165                                )
166 {
167   PetscErrorCode   ierr;
168   Mat              Cmat,Pnew,Pold=*a_P_inout;
169   IS               new_indices,isnum;
170   MPI_Comm         wcomm = ((PetscObject)a_Amat_fine)->comm;
171   PetscMPIInt      mype,npe,new_npe,nactive;
172   PetscInt         neq,NN,Istart,Iend,Istart0,Iend0,ncrs_new,ncrs0;
173 
174   PetscFunctionBegin;
175   ierr = MPI_Comm_rank( wcomm, &mype ); CHKERRQ(ierr);
176   ierr = MPI_Comm_size( wcomm, &npe );  CHKERRQ(ierr);
177   /* RAP */
178   // #define USE_R
179 #ifdef USE_R
180   /* make R with brute force for now */
181   ierr = MatTranspose( Pold, MAT_INITIAL_MATRIX, &Pnew ); CHKERRQ(ierr);
182   ierr = MatDestroy( &Pold );  CHKERRQ(ierr);
183   ierr = MatRARt( a_Amat_fine, Pnew, MAT_INITIAL_MATRIX, 2.0, &Cmat ); CHKERRQ(ierr);
184   Pold = Pnew;
185 #else
186   ierr = MatPtAP( a_Amat_fine, Pold, MAT_INITIAL_MATRIX, 2.0, &Cmat ); CHKERRQ(ierr);
187 #endif
188   ierr = MatSetBlockSize( Cmat, a_cbs );      CHKERRQ(ierr);
189   ierr = MatGetOwnershipRange( Cmat, &Istart0, &Iend0 ); CHKERRQ(ierr);
190   ncrs0 = (Iend0-Istart0)/a_cbs; assert((Iend0-Istart0)%a_cbs == 0);
191 
192   if( s_avoid_repart ) {
193     *a_Amat_crs = Cmat; /* output */
194   }
195   else {
196     /* Repartition Cmat_{k} and move columns of P^{k}_{k-1} and coordinates accordingly */
197     Mat              adj;
198     const PetscInt *idx,data_sz=a_ndata_rows*a_ndata_cols;
199     const PetscInt  stride0=ncrs0*a_ndata_rows;
200     PetscInt        is_sz,*isnewproc_idx,ii,jj,kk,strideNew,*tidx;
201     /* create sub communicator  */
202     MPI_Comm        cm,new_comm;
203     MPI_Group       wg, g2;
204     PetscInt       *counts,inpe;
205     PetscMPIInt    *ranks;
206     IS              isscat;
207     PetscScalar    *array;
208     Vec             src_crd, dest_crd;
209     PetscReal      *data = *a_coarse_data;
210     VecScatter      vecscat;
211     IS  isnewproc;
212 
213     /* get number of PEs to make active, reduce */
214     ierr = MatGetSize( Cmat, &neq, &NN );  CHKERRQ(ierr);
215     new_npe = neq/s_min_eq_proc; /* hardwire min. number of eq/proc */
216     if( new_npe == 0 || neq < TOP_GRID_LIM ) new_npe = 1;
217     else if (new_npe >= *a_nactive_proc ) new_npe = *a_nactive_proc; /* no change, rare */
218 
219     ierr = PetscMalloc( npe*sizeof(PetscMPIInt), &ranks ); CHKERRQ(ierr);
220     ierr = PetscMalloc( npe*sizeof(PetscInt), &counts ); CHKERRQ(ierr);
221 
222     ierr = MPI_Allgather( &ncrs0, 1, MPIU_INT, counts, 1, MPIU_INT, wcomm ); CHKERRQ(ierr);
223     assert(counts[mype]==ncrs0);
224     /* count real active pes */
225     for( nactive = jj = 0 ; jj < npe ; jj++) {
226       if( counts[jj] != 0 ) {
227         ranks[nactive++] = jj;
228       }
229     }
230 
231     if (nactive < new_npe) new_npe = nactive; /* this can happen with empty input procs */
232 
233 #ifdef VERBOSE
234     PetscPrintf(PETSC_COMM_WORLD,"\t[%d]%s npe (active): %d --> %d. new npe = %d, neq = %d\n",mype,__FUNCT__,*a_nactive_proc,nactive,new_npe,neq);
235 #endif
236 
237     *a_nactive_proc = new_npe; /* output */
238 
239     ierr = MPI_Comm_group( wcomm, &wg ); CHKERRQ(ierr);
240     ierr = MPI_Group_incl( wg, nactive, ranks, &g2 ); CHKERRQ(ierr);
241     ierr = MPI_Comm_create( wcomm, g2, &cm ); CHKERRQ(ierr);
242 
243     if( cm != MPI_COMM_NULL ) {
244       assert(ncrs0 != 0);
245       ierr = PetscCommDuplicate( cm, &new_comm, PETSC_NULL ); CHKERRQ(ierr);
246       ierr = MPI_Comm_free( &cm );                             CHKERRQ(ierr);
247     }
248     else assert(ncrs0 == 0);
249 
250     ierr = MPI_Group_free( &wg );                            CHKERRQ(ierr);
251     ierr = MPI_Group_free( &g2 );                            CHKERRQ(ierr);
252 
253     /* MatPartitioningApply calls MatConvert, which is collective */
254 #if defined PETSC_USE_LOG
255     ierr = PetscLogEventBegin(gamg_setup_events[SET12],0,0,0,0);CHKERRQ(ierr);
256 #endif
257     if( a_cbs == 1) {
258       ierr = MatConvert( Cmat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj );   CHKERRQ(ierr);
259     }
260     else{
261       /* make a scalar matrix to partition */
262       Mat tMat;
263       PetscInt ncols,jj,Ii;
264       const PetscScalar *vals;
265       const PetscInt *idx;
266       PetscInt *d_nnz;
267       static int llev = 0;
268 
269       ierr = PetscMalloc( ncrs0*sizeof(PetscInt), &d_nnz ); CHKERRQ(ierr);
270       for ( Ii = Istart0, jj = 0 ; Ii < Iend0 ; Ii += a_cbs, jj++ ) {
271         ierr = MatGetRow(Cmat,Ii,&ncols,0,0); CHKERRQ(ierr);
272         d_nnz[jj] = ncols/a_cbs;
273         if( d_nnz[jj] > ncrs0 ) d_nnz[jj] = ncrs0;
274         ierr = MatRestoreRow(Cmat,Ii,&ncols,0,0); CHKERRQ(ierr);
275       }
276 
277       ierr = MatCreateMPIAIJ( wcomm, ncrs0, ncrs0,
278                               PETSC_DETERMINE, PETSC_DETERMINE,
279                               0, d_nnz, 0, d_nnz,
280                               &tMat );
281       CHKERRQ(ierr);
282       ierr = PetscFree( d_nnz ); CHKERRQ(ierr);
283 
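      /* Collapse the blocked matrix to one vertex per block: row Ii of Cmat
         contributes to scalar row Ii/a_cbs, each column idx[jj] maps to
         idx[jj]/a_cbs, and duplicates are summed with ADD_VALUES. */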
284       for ( ii = Istart0; ii < Iend0; ii++ ) {
285         PetscInt dest_row = ii/a_cbs;
286         ierr = MatGetRow(Cmat,ii,&ncols,&idx,&vals); CHKERRQ(ierr);
287         for( jj = 0 ; jj < ncols ; jj++ ){
288           PetscInt dest_col = idx[jj]/a_cbs;
289           PetscScalar v = 1.0;
290           ierr = MatSetValues(tMat,1,&dest_row,1,&dest_col,&v,ADD_VALUES); CHKERRQ(ierr);
291         }
292         ierr = MatRestoreRow(Cmat,ii,&ncols,&idx,&vals); CHKERRQ(ierr);
293       }
294       ierr = MatAssemblyBegin(tMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
295       ierr = MatAssemblyEnd(tMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
296 
297       if( llev++ == -1 ) { /* debugging: write the partitioning matrix to a file */
298         PetscViewer viewer; char fname[32];
299         sprintf(fname,"part_mat_%d.mat",llev);
300         ierr = PetscViewerBinaryOpen(wcomm,fname,FILE_MODE_WRITE,&viewer); CHKERRQ(ierr);
301         ierr = MatView( tMat, viewer ); CHKERRQ(ierr);
302         ierr = PetscViewerDestroy( &viewer ); CHKERRQ(ierr);
303       }
304 
305       ierr = MatConvert( tMat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj );   CHKERRQ(ierr);
306 
307       ierr = MatDestroy( &tMat );  CHKERRQ(ierr);
308     }
309     if( ncrs0 != 0 ){
310       const PetscInt *is_idx;
311       MatPartitioning  mpart;
312       /* hack to fix global data that pmetis.c uses in 'adj' */
313       for( nactive = jj = 0 ; jj < npe ; jj++ ) {
314 	if( counts[jj] != 0 ) {
315 	  adj->rmap->range[nactive++] = adj->rmap->range[jj];
316 	}
317       }
318       adj->rmap->range[nactive] = adj->rmap->range[npe];
319 
320       ierr = MatPartitioningCreate( new_comm, &mpart ); CHKERRQ(ierr);
321       ierr = MatPartitioningSetAdjacency( mpart, adj ); CHKERRQ(ierr);
322       ierr = MatPartitioningSetFromOptions( mpart );    CHKERRQ(ierr);
323       ierr = MatPartitioningSetNParts( mpart, new_npe );CHKERRQ(ierr);
324       ierr = MatPartitioningApply( mpart, &isnewproc ); CHKERRQ(ierr);
325       ierr = MatPartitioningDestroy( &mpart );          CHKERRQ(ierr);
326 
327       /* collect IS info */
328       ierr = ISGetLocalSize( isnewproc, &is_sz );       CHKERRQ(ierr);
329       ierr = PetscMalloc( a_cbs*is_sz*sizeof(PetscInt), &isnewproc_idx ); CHKERRQ(ierr);
330       ierr = ISGetIndices( isnewproc, &is_idx );        CHKERRQ(ierr);
331       /* spread the new partitioning across the machine? NN=1 packs the active ranks densely */
332       NN = 1; /*npe/new_npe;*/
333       for( kk = jj = 0 ; kk < is_sz ; kk++ ){
334         for( ii = 0 ; ii < a_cbs ; ii++, jj++ ) {
335           isnewproc_idx[jj] = is_idx[kk] * NN; /* distribution */
336         }
337       }
338       ierr = ISRestoreIndices( isnewproc, &is_idx );     CHKERRQ(ierr);
339       ierr = ISDestroy( &isnewproc );                    CHKERRQ(ierr);
340       ierr = PetscCommDestroy( &new_comm );              CHKERRQ(ierr);
341 
342       is_sz *= a_cbs;
343     }
344     else{
345       isnewproc_idx = 0;
346       is_sz = 0;
347     }
348     ierr = MatDestroy( &adj );                       CHKERRQ(ierr);
349     ierr = ISCreateGeneral( wcomm, is_sz, isnewproc_idx, PETSC_COPY_VALUES, &isnewproc ); CHKERRQ(ierr);
350     if( isnewproc_idx != 0 ) {
351       ierr = PetscFree( isnewproc_idx );  CHKERRQ(ierr);
352     }
353 
354     /*
355      Create an index set from the isnewproc index set to indicate the mapping TO
356      */
357     ierr = ISPartitioningToNumbering( isnewproc, &isnum ); CHKERRQ(ierr);
358     /*
359      Determine how many elements are assigned to each processor
360      */
361     inpe = npe;
362     ierr = ISPartitioningCount( isnewproc, inpe, counts ); CHKERRQ(ierr);
363     ierr = ISDestroy( &isnewproc );                       CHKERRQ(ierr);
364     ncrs_new = counts[mype]/a_cbs;
365     strideNew = ncrs_new*a_ndata_rows;
366 #if defined PETSC_USE_LOG
367     ierr = PetscLogEventEnd(gamg_setup_events[SET12],0,0,0,0);   CHKERRQ(ierr);
368 #endif
369     /* Create a vector to contain the newly ordered element information */
370     ierr = VecCreate( wcomm, &dest_crd ); CHKERRQ(ierr);
371     ierr = VecSetSizes( dest_crd, data_sz*ncrs_new, PETSC_DECIDE ); CHKERRQ(ierr);
372     ierr = VecSetFromOptions( dest_crd ); CHKERRQ(ierr); /* this is needed! */
373     /*
374      There are 'a_ndata_rows*a_ndata_cols' data items per node (one can think of the vectors
375      as having a block size of 'data_sz').  Note, ISs are expanded into equation space by 'a_cbs'.
376      */
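    /* e.g., with a_ndata_rows=3, a_ndata_cols=6 (3D elasticity, data_sz=18),
       node ii's entry (kk,jj) sits at ix = ii*3 + kk + jj*stride0 in 'data'
       (column-major) and at jx = ii*18 + kk*6 + jj in the scatter vector */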
377     ierr = PetscMalloc( (ncrs0*data_sz)*sizeof(PetscInt), &tidx ); CHKERRQ(ierr);
378     ierr = ISGetIndices( isnum, &idx ); CHKERRQ(ierr);
379     for(ii=0,jj=0; ii<ncrs0 ; ii++) {
380       PetscInt id = idx[ii*a_cbs]/a_cbs; /* get node back */
381       for( kk=0; kk<data_sz ; kk++, jj++) tidx[jj] = id*data_sz + kk;
382     }
383     ierr = ISRestoreIndices( isnum, &idx ); CHKERRQ(ierr);
384     ierr = ISCreateGeneral( wcomm, data_sz*ncrs0, tidx, PETSC_COPY_VALUES, &isscat );
385     CHKERRQ(ierr);
386     ierr = PetscFree( tidx );  CHKERRQ(ierr);
387     /*
388      Create a vector to contain the original vertex information for each element
389      */
390     ierr = VecCreateSeq( PETSC_COMM_SELF, data_sz*ncrs0, &src_crd ); CHKERRQ(ierr);
391     for( jj=0; jj<a_ndata_cols ; jj++ ) {
392       for( ii=0 ; ii<ncrs0 ; ii++) {
393 	for( kk=0; kk<a_ndata_rows ; kk++ ) {
394 	  PetscInt ix = ii*a_ndata_rows + kk + jj*stride0, jx = ii*data_sz + kk*a_ndata_cols + jj;
395           PetscScalar tt = (PetscScalar)data[ix];
396 	  ierr = VecSetValues( src_crd, 1, &jx, &tt, INSERT_VALUES );  CHKERRQ(ierr);
397 	}
398       }
399     }
400     ierr = VecAssemblyBegin(src_crd); CHKERRQ(ierr);
401     ierr = VecAssemblyEnd(src_crd); CHKERRQ(ierr);
402     /*
403       Scatter the element vertex information (still in the original vertex ordering)
404       to the correct processor
405     */
406     ierr = VecScatterCreate( src_crd, PETSC_NULL, dest_crd, isscat, &vecscat);
407     CHKERRQ(ierr);
408     ierr = ISDestroy( &isscat );       CHKERRQ(ierr);
409     ierr = VecScatterBegin(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
410     ierr = VecScatterEnd(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
411     ierr = VecScatterDestroy( &vecscat );       CHKERRQ(ierr);
412     ierr = VecDestroy( &src_crd );       CHKERRQ(ierr);
413     /*
414       Put the element vertex data into the new allocation of '*a_coarse_data'
415     */
416     ierr = PetscFree( *a_coarse_data );    CHKERRQ(ierr);
417     ierr = PetscMalloc( data_sz*ncrs_new*sizeof(PetscReal), a_coarse_data );    CHKERRQ(ierr);
418 
419     ierr = VecGetArray( dest_crd, &array );    CHKERRQ(ierr);
420     data = *a_coarse_data;
421     for( jj=0; jj<a_ndata_cols ; jj++ ) {
422       for( ii=0 ; ii<ncrs_new ; ii++) {
423 	for( kk=0; kk<a_ndata_rows ; kk++ ) {
424 	  PetscInt ix = ii*a_ndata_rows + kk + jj*strideNew, jx = ii*data_sz + kk*a_ndata_cols + jj;
425 	  data[ix] = PetscRealPart(array[jx]);
426 	  array[jx] = 1.e300;
427 	}
428       }
429     }
430     ierr = VecRestoreArray( dest_crd, &array );    CHKERRQ(ierr);
431     ierr = VecDestroy( &dest_crd );    CHKERRQ(ierr);
432     /*
433       Invert for MatGetSubMatrix
434     */
435     ierr = ISInvertPermutation( isnum, ncrs_new*a_cbs, &new_indices ); CHKERRQ(ierr);
436     ierr = ISSort( new_indices ); CHKERRQ(ierr); /* is this needed? */
437     ierr = ISDestroy( &isnum ); CHKERRQ(ierr);
438     /* A_crs output */
439     ierr = MatGetSubMatrix( Cmat, new_indices, new_indices, MAT_INITIAL_MATRIX, a_Amat_crs );
440     CHKERRQ(ierr);
441 
442     ierr = MatDestroy( &Cmat ); CHKERRQ(ierr);
443     Cmat = *a_Amat_crs; /* output */
444     ierr = MatSetBlockSize( Cmat, a_cbs );      CHKERRQ(ierr);
445 
446     /* prolongator */
447     ierr = MatGetOwnershipRange( Pold, &Istart, &Iend );    CHKERRQ(ierr);
448     {
449       IS findices;
450       ierr = ISCreateStride(wcomm,Iend-Istart,Istart,1,&findices);   CHKERRQ(ierr);
451 #ifdef USE_R
452       ierr = MatGetSubMatrix( Pold, new_indices, findices, MAT_INITIAL_MATRIX, &Pnew );
453 #else
454       ierr = MatGetSubMatrix( Pold, findices, new_indices, MAT_INITIAL_MATRIX, &Pnew );
455 #endif
456       CHKERRQ(ierr);
457 
458       ierr = ISDestroy( &findices ); CHKERRQ(ierr);
459     }
460 
461     ierr = MatDestroy( a_P_inout ); CHKERRQ(ierr);
462     *a_P_inout = Pnew; /* output */
463 
464     ierr = ISDestroy( &new_indices ); CHKERRQ(ierr);
465     ierr = PetscFree( counts );  CHKERRQ(ierr);
466     ierr = PetscFree( ranks );  CHKERRQ(ierr);
467   }
468 
469   PetscFunctionReturn(0);
470 }
471 
472 /* -------------------------------------------------------------------------- */
473 /*
474    PCSetUp_GAMG - Prepares for the use of the GAMG preconditioner
475                     by setting data structures and options.
476 
477    Input Parameter:
478 .  pc - the preconditioner context
479 
480    Application Interface Routine: PCSetUp()
481 
482    Notes:
483    The interface routine PCSetUp() is not usually called directly by
484    the user, but instead is called by PCApply() if necessary.
485 */
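
/*
   Example command-line usage - a sketch, assuming a driver that calls
   KSPSetFromOptions() (the executable name './ex' is hypothetical):

     mpiexec -n 4 ./ex -ksp_type cg -pc_type gamg -pc_gamg_type sa \
         -pc_gamg_threshold 0.05 -pc_gamg_process_eq_limit 1000
*/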
486 #undef __FUNCT__
487 #define __FUNCT__ "PCSetUp_GAMG"
488 PetscErrorCode PCSetUp_GAMG( PC a_pc )
489 {
490   PetscErrorCode  ierr;
491   PC_MG           *mg = (PC_MG*)a_pc->data;
492   PC_GAMG         *pc_gamg = (PC_GAMG*)mg->innerctx;
493   PC_MG_Levels   **mglevels = mg->levels;
494   Mat              Amat = a_pc->mat, Pmat = a_pc->pmat;
495   PetscInt         fine_level, level, level1, M, N, bs, nloc, lidx, Istart, Iend;
496   MPI_Comm         wcomm = ((PetscObject)a_pc)->comm;
497   PetscMPIInt      mype,npe,nactivepe;
498   PetscBool        isOK;
499   Mat              Aarr[GAMG_MAXLEVELS], Parr[GAMG_MAXLEVELS];
500   PetscReal       *coarse_data = 0, *data, emaxs[GAMG_MAXLEVELS];
501   MatInfo          info;
502 
503   PetscFunctionBegin;
504   pc_gamg->m_count++;
505   if( a_pc->setupcalled > 0 ) {
506     /* just do Galerkin grids */
507     Mat B,dA,dB;
508 
509     /* PCSetUp_MG seems to insist on setting this to GMRES */
510     ierr = KSPSetType( mglevels[0]->smoothd, KSPPREONLY ); CHKERRQ(ierr);
511 
512     /* currently only handle case where mat and pmat are the same on coarser levels */
513     ierr = KSPGetOperators(mglevels[pc_gamg->m_Nlevels-1]->smoothd,&dA,&dB,PETSC_NULL);CHKERRQ(ierr);
514     /* (re)set to get dirty flag */
515     ierr = KSPSetOperators(mglevels[pc_gamg->m_Nlevels-1]->smoothd,dA,dB,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
516     ierr = KSPSetUp( mglevels[pc_gamg->m_Nlevels-1]->smoothd ); CHKERRQ(ierr);
517 
518     for (level=pc_gamg->m_Nlevels-2; level>-1; level--) {
519       ierr = KSPGetOperators(mglevels[level]->smoothd,PETSC_NULL,&B,PETSC_NULL);CHKERRQ(ierr);
520       /* the first time through the matrix structure has changed from repartitioning */
521       if( pc_gamg->m_count == 2 ) {
522         ierr = MatDestroy( &B );  CHKERRQ(ierr);
523         ierr = MatPtAP(dB,mglevels[level+1]->interpolate,MAT_INITIAL_MATRIX,1.0,&B);CHKERRQ(ierr);
524         mglevels[level]->A = B;
525       }
526       else {
527         ierr = MatPtAP(dB,mglevels[level+1]->interpolate,MAT_REUSE_MATRIX,1.0,&B);CHKERRQ(ierr);
528       }
529       ierr = KSPSetOperators(mglevels[level]->smoothd,B,B,SAME_NONZERO_PATTERN); CHKERRQ(ierr);
530       dB = B;
531       /* setup KSP/PC */
532       ierr = KSPSetUp( mglevels[level]->smoothd ); CHKERRQ(ierr);
533     }
534 
535 #define PRINT_MATS PETSC_FALSE
536     /* plot levels - A */
537     if( PRINT_MATS ) {
538       for (lidx=0, level=pc_gamg->m_Nlevels-1; level>0 ; level--,lidx++){
539         PetscViewer viewer;
540         char fname[32]; KSP smoother; Mat Tmat, TTm;
541         ierr = PCMGGetSmoother( a_pc, lidx, &smoother ); CHKERRQ(ierr);
542         ierr = KSPGetOperators( smoother, &Tmat, &TTm, 0 ); CHKERRQ(ierr);
543         sprintf(fname,"Amat_%d_%d.m",(int)pc_gamg->m_count,(int)level);
544         ierr = PetscViewerASCIIOpen( wcomm, fname, &viewer );  CHKERRQ(ierr);
545         ierr = PetscViewerSetFormat( viewer, PETSC_VIEWER_ASCII_MATLAB);  CHKERRQ(ierr);
546         ierr = MatView( Tmat, viewer ); CHKERRQ(ierr);
547         ierr = PetscViewerDestroy( &viewer ); CHKERRQ(ierr);
548       }
549     }
550 
551     a_pc->setupcalled = 2;
552 
553     PetscFunctionReturn(0);
554   }
555   ierr = MPI_Comm_rank(wcomm,&mype);CHKERRQ(ierr);
556   ierr = MPI_Comm_size(wcomm,&npe);CHKERRQ(ierr);
557   /* GAMG requires the fine-grid matrix as input; the setup determines the number of levels */
558   ierr = MatGetBlockSize( Amat, &bs ); CHKERRQ(ierr);
559   ierr = MatGetSize( Amat, &M, &N );CHKERRQ(ierr);
560   ierr = MatGetOwnershipRange( Amat, &Istart, &Iend ); CHKERRQ(ierr);
561   nloc = (Iend-Istart)/bs; assert((Iend-Istart)%bs == 0);
562 
563   /* create default (constant) data if the user did not provide any */
564   if( pc_gamg->m_data == 0 && nloc > 0 ) {
565     ierr  = PCSetCoordinates_GAMG( a_pc, -1, 0 );    CHKERRQ( ierr );
566   }
567   data = pc_gamg->m_data;
568 
569   /* Get A_i and R_i */
570   ierr = MatGetInfo(Amat,MAT_GLOBAL_SUM,&info); CHKERRQ(ierr);
571 #ifdef VERBOSE
572   PetscPrintf(PETSC_COMM_WORLD,"\t[%d]%s level %d N=%d, n data rows=%d, n data cols=%d, nnz/row (ave)=%d, np=%d\n",
573 	      mype,__FUNCT__,0,N,pc_gamg->m_data_rows,pc_gamg->m_data_cols,
574 	      (int)(info.nz_used/(PetscReal)N),npe);
575 #endif
576   for ( level=0, Aarr[0] = Pmat, nactivepe = npe; /* hard wired stopping logic */
577         level < GAMG_MAXLEVELS-1 && (level==0 || M>TOP_GRID_LIM) && (npe==1 || nactivepe>1);
578         level++ ){
579     level1 = level + 1;
580 #if (defined PETSC_USE_LOG && defined GAMG_STAGES)
581     ierr = PetscLogStagePush(gamg_stages[level]); CHKERRQ( ierr );
582 #endif
583 #if defined PETSC_USE_LOG
584     ierr = PetscLogEventBegin(gamg_setup_events[SET1],0,0,0,0);CHKERRQ(ierr);
585 #endif
586     ierr = createProlongation(Aarr[level], data, pc_gamg->m_dim, pc_gamg->m_data_cols, pc_gamg->m_method,
587                               level, s_threshold, &bs, &Parr[level1], &coarse_data, &isOK, &emaxs[level] );
588     CHKERRQ(ierr);
589     ierr = PetscFree( data ); CHKERRQ( ierr );
590 #if defined PETSC_USE_LOG
591     ierr = PetscLogEventEnd(gamg_setup_events[SET1],0,0,0,0);CHKERRQ(ierr);
592 #endif
593     if(level==0) Aarr[0] = Amat; /* use Pmat for finest level setup, but use mat for solver */
594     if( isOK ) {
595 #if defined PETSC_USE_LOG
596       ierr = PetscLogEventBegin(gamg_setup_events[SET2],0,0,0,0);CHKERRQ(ierr);
597 #endif
598       ierr = partitionLevel( Aarr[level], (pc_gamg->m_method != 0) ? bs : 1, pc_gamg->m_data_cols, bs,
599                              &Parr[level1], &coarse_data, &nactivepe, &Aarr[level1] );
600       CHKERRQ(ierr);
601 #if defined PETSC_USE_LOG
602       ierr = PetscLogEventEnd(gamg_setup_events[SET2],0,0,0,0);CHKERRQ(ierr);
603 #endif
604       ierr = MatGetSize( Aarr[level1], &M, &N );CHKERRQ(ierr);
605       ierr = MatGetInfo(Aarr[level1],MAT_GLOBAL_SUM,&info); CHKERRQ(ierr);
606 #ifdef VERBOSE
607       PetscPrintf(PETSC_COMM_WORLD,"\t\t[%d]%s %d) N=%d, n data cols=%d, nnz/row (ave)=%d, %d active pes\n",
608 		  mype,__FUNCT__,(int)level1,N,pc_gamg->m_data_cols,
609 		  (int)(info.nz_used/(PetscReal)N),nactivepe);
610 #endif
611       /* coarse grids with SA can have zero rows/cols from singleton aggregates */
612       /* the aggregation method should guarantee this does not happen! */
613 
614 #ifdef VERBOSE
615       if( PETSC_TRUE ){
616         Vec diag; PetscScalar *data_arr,v; PetscInt Istart,Iend,kk,nloceq,id;
617         v = 1.e-10; /* LU factor has hard wired numbers for small diags so this needs to match (yuk) */
618         ierr = MatGetOwnershipRange(Aarr[level1], &Istart, &Iend); CHKERRQ(ierr);
619         nloceq = Iend-Istart;
620         ierr = MatGetVecs( Aarr[level1], &diag, 0 );    CHKERRQ(ierr);
621         ierr = MatGetDiagonal( Aarr[level1], diag );    CHKERRQ(ierr);
622         ierr = VecGetArray( diag, &data_arr );   CHKERRQ(ierr);
623         for(kk=0;kk<nloceq;kk++){
624           if(data_arr[kk]==0.0) {
625             id = kk + Istart;
626             ierr = MatSetValues(Aarr[level1],1,&id,1,&id,&v,INSERT_VALUES);
627             CHKERRQ(ierr);
628             PetscPrintf(PETSC_COMM_SELF,"\t[%d]%s warning: added zero to diag (%d) on level %d \n",mype,__FUNCT__,id,level1);
629           }
630         }
631         ierr = VecRestoreArray( diag, &data_arr ); CHKERRQ(ierr);
632         ierr = VecDestroy( &diag );                CHKERRQ(ierr);
633         ierr = MatAssemblyBegin(Aarr[level1],MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
634         ierr = MatAssemblyEnd(Aarr[level1],MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
635       }
636 #endif
637     }
638     else{
639       coarse_data = 0;
640       break;
641     }
642     data = coarse_data;
643 
644 #if (defined PETSC_USE_LOG && defined GAMG_STAGES)
645     ierr = PetscLogStagePop(); CHKERRQ( ierr );
646 #endif
647   }
648   if( coarse_data ) {
649     ierr = PetscFree( coarse_data ); CHKERRQ( ierr );
650   }
651 #ifdef VERBOSE
652   PetscPrintf(PETSC_COMM_WORLD,"\t[%d]%s %d levels\n",0,__FUNCT__,level + 1);
653 #endif
654   pc_gamg->m_data = 0; /* destroyed coordinate data */
655   pc_gamg->m_Nlevels = level + 1;
656   fine_level = level;
657   ierr = PCMGSetLevels(a_pc,pc_gamg->m_Nlevels,PETSC_NULL);CHKERRQ(ierr);
658 
659   /* set default smoothers */
660   for ( lidx=1, level = pc_gamg->m_Nlevels-2;
661         lidx <= fine_level;
662         lidx++, level--) {
663     PetscReal emax, emin;
664     KSP smoother; PC subpc;
665     ierr = PCMGGetSmoother( a_pc, lidx, &smoother ); CHKERRQ(ierr);
666     ierr = KSPSetType( smoother, KSPCHEBYCHEV );CHKERRQ(ierr);
667     if( emaxs[level] > 0.0 ) emax=emaxs[level];
668     else{ /* eigen estimate 'emax' */
669       KSP eksp; Mat Lmat = Aarr[level];
670       Vec bb, xx; PC pc;
671 
672       ierr = MatGetVecs( Lmat, &bb, 0 );         CHKERRQ(ierr);
673       ierr = MatGetVecs( Lmat, &xx, 0 );         CHKERRQ(ierr);
674       {
675 	PetscRandom    rctx;
676 	ierr = PetscRandomCreate(wcomm,&rctx);CHKERRQ(ierr);
677 	ierr = PetscRandomSetFromOptions(rctx);CHKERRQ(ierr);
678 	ierr = VecSetRandom(bb,rctx);CHKERRQ(ierr);
679 	ierr = PetscRandomDestroy( &rctx ); CHKERRQ(ierr);
680       }
681       ierr = KSPCreate(wcomm,&eksp);CHKERRQ(ierr);
682       ierr = KSPSetType( eksp, KSPCG );                      CHKERRQ(ierr);
683       ierr = KSPSetInitialGuessNonzero( eksp, PETSC_FALSE ); CHKERRQ(ierr);
684       ierr = KSPSetOperators( eksp, Lmat, Lmat, SAME_NONZERO_PATTERN ); CHKERRQ( ierr );
685       ierr = KSPGetPC( eksp, &pc );CHKERRQ( ierr );
686       ierr = PCSetType( pc, PETSC_GAMG_SMOOTHER ); CHKERRQ(ierr); /* should be the same PC as the smoother so the eigen estimate matches the smoothed operator */
687       ierr = KSPSetTolerances( eksp, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT, 10 );
688       CHKERRQ(ierr);
689       ierr = KSPSetNormType( eksp, KSP_NORM_NONE );                 CHKERRQ(ierr);
690 
691       ierr = KSPSetComputeSingularValues( eksp,PETSC_TRUE ); CHKERRQ(ierr);
692       ierr = KSPSolve( eksp, bb, xx ); CHKERRQ(ierr);
693       ierr = KSPComputeExtremeSingularValues( eksp, &emax, &emin ); CHKERRQ(ierr);
694       ierr = VecDestroy( &xx );       CHKERRQ(ierr);
695       ierr = VecDestroy( &bb );       CHKERRQ(ierr);
696       ierr = KSPDestroy( &eksp );       CHKERRQ(ierr);
697 #ifdef VERBOSE
698       PetscPrintf(PETSC_COMM_WORLD,"\t\t\t%s PC setup max eigen=%e min=%e PC=%s\n",__FUNCT__,emax,emin,PETSC_GAMG_SMOOTHER);
699 #endif
700     }
701     {
702       PetscInt N1, N0, tt;
703       ierr = MatGetSize( Aarr[level], &N1, &tt );         CHKERRQ(ierr);
704       ierr = MatGetSize( Aarr[level+1], &N0, &tt );       CHKERRQ(ierr);
705       emin = 1.*emax/((PetscReal)N1/(PetscReal)N0); /* this should be about the coarsening rate */
706       emax *= 1.05;
707     }
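    /* e.g., emax=2.0 with a coarsening ratio N1/N0=8 gives a Chebychev
       interval of [0.25, 2.1] (emin = 2.0/8, emax = 1.05*2.0) */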
708 
709     ierr = KSPSetOperators( smoother, Aarr[level], Aarr[level], SAME_NONZERO_PATTERN ); CHKERRQ(ierr);
710     ierr = KSPChebychevSetEigenvalues( smoother, emax, emin );CHKERRQ(ierr);
711     /* ierr = KSPSetTolerances(smoother,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,2); CHKERRQ(ierr); */
712     ierr = KSPGetPC( smoother, &subpc ); CHKERRQ(ierr);
713     ierr = PCSetType( subpc, PETSC_GAMG_SMOOTHER ); CHKERRQ(ierr);
714     ierr = KSPSetNormType( smoother, KSP_NORM_NONE ); CHKERRQ(ierr);
715   }
716   {
717     /* coarse grid */
718     KSP smoother,*k2; PC subpc,pc2; PetscInt ii,first;
719     Mat Lmat = Aarr[pc_gamg->m_Nlevels-1];
720     ierr = PCMGGetSmoother( a_pc, 0, &smoother ); CHKERRQ(ierr);
721     ierr = KSPSetOperators( smoother, Lmat, Lmat, SAME_NONZERO_PATTERN ); CHKERRQ(ierr);
722     ierr = KSPSetNormType( smoother, KSP_NORM_NONE ); CHKERRQ(ierr);
723     ierr = KSPGetPC( smoother, &subpc ); CHKERRQ(ierr);
724     ierr = PCSetType( subpc, PCBJACOBI ); CHKERRQ(ierr);
725     ierr = PCSetUp( subpc ); CHKERRQ(ierr);
726     ierr = PCBJacobiGetSubKSP(subpc,&ii,&first,&k2);CHKERRQ(ierr);
727     assert(ii==1);
728     ierr = KSPGetPC(k2[0],&pc2);CHKERRQ(ierr);
729     ierr = PCSetType( pc2, PCLU ); CHKERRQ(ierr);
730   }
731 
732   /* should be called in PCSetFromOptions_GAMG(), but cannot be called prior to PCMGSetLevels() */
733   ierr = PCSetFromOptions_MG(a_pc); CHKERRQ(ierr);
734   {
735     PetscBool galerkin;
736     ierr = PCMGGetGalerkin( a_pc,  &galerkin); CHKERRQ(ierr);
737     if(galerkin){
738       SETERRQ(wcomm,PETSC_ERR_ARG_WRONG, "GAMG forms the Galerkin coarse operators itself, so PCMG's Galerkin option must not be set.");
739     }
740   }
741 
742   /* plot levels - R/P */
743   if( PRINT_MATS ) {
744     for (level=pc_gamg->m_Nlevels-1;level>0;level--){
745       PetscViewer viewer;
746       char fname[32];
747       sprintf(fname,"Pmat_%d_%d.m",(int)pc_gamg->m_count,(int)level);
748       ierr = PetscViewerASCIIOpen( wcomm, fname, &viewer );  CHKERRQ(ierr);
749       ierr = PetscViewerSetFormat( viewer, PETSC_VIEWER_ASCII_MATLAB);  CHKERRQ(ierr);
750       ierr = MatView( Parr[level], viewer ); CHKERRQ(ierr);
751       ierr = PetscViewerDestroy( &viewer ); CHKERRQ(ierr);
752       sprintf(fname,"Amat_%d_%d.m",(int)pc_gamg->m_count,(int)level);
753       ierr = PetscViewerASCIIOpen( wcomm, fname, &viewer );  CHKERRQ(ierr);
754       ierr = PetscViewerSetFormat( viewer, PETSC_VIEWER_ASCII_MATLAB);  CHKERRQ(ierr);
755       ierr = MatView( Aarr[level], viewer ); CHKERRQ(ierr);
756       ierr = PetscViewerDestroy( &viewer ); CHKERRQ(ierr);
757     }
758   }
759 
760   /* set interpolation between the levels, clean up */
761   for (lidx=0,level=pc_gamg->m_Nlevels-1;
762        lidx<fine_level;
763        lidx++, level--){
764     ierr = PCMGSetInterpolation( a_pc, lidx+1, Parr[level] );CHKERRQ(ierr);
765     ierr = MatDestroy( &Parr[level] );  CHKERRQ(ierr);
766     ierr = MatDestroy( &Aarr[level] );  CHKERRQ(ierr);
767   }
768 
769   /* setupcalled is set to 0 so that MG is setup from scratch */
770   a_pc->setupcalled = 0;
771   ierr = PCSetUp_MG( a_pc );CHKERRQ( ierr );
772   a_pc->setupcalled = 1; /* use 1 as signal that this has not been re-setup */
773 
774   {
775     KSP smoother;  /* PCSetUp_MG seems to insist on setting this to GMRES on the coarse grid */
776     ierr = PCMGGetSmoother( a_pc, 0, &smoother ); CHKERRQ(ierr);
777     ierr = KSPSetType( smoother, KSPPREONLY ); CHKERRQ(ierr);
778     ierr = KSPSetUp( smoother ); CHKERRQ(ierr);
779   }
780 
781   PetscFunctionReturn(0);
782 }
783 
784 /* ------------------------------------------------------------------------- */
785 /*
786    PCDestroy_GAMG - Destroys the private context for the GAMG preconditioner
787    that was created with PCCreate_GAMG().
788 
789    Input Parameter:
790 .  pc - the preconditioner context
791 
792    Application Interface Routine: PCDestroy()
793 */
794 #undef __FUNCT__
795 #define __FUNCT__ "PCDestroy_GAMG"
796 PetscErrorCode PCDestroy_GAMG(PC pc)
797 {
798   PetscErrorCode  ierr;
799   PC_MG           *mg = (PC_MG*)pc->data;
800   PC_GAMG         *pc_gamg= (PC_GAMG*)mg->innerctx;
801 
802   PetscFunctionBegin;
803   ierr = PCReset_GAMG(pc);CHKERRQ(ierr);
804   ierr = PetscFree(pc_gamg);CHKERRQ(ierr);
805   ierr = PCDestroy_MG(pc);CHKERRQ(ierr);
806   PetscFunctionReturn(0);
807 }
808 
809 
810 #undef __FUNCT__
811 #define __FUNCT__ "PCGAMGSetProcEqLim"
812 /*@
813    PCGAMGSetProcEqLim - Set the target number of equations per process on coarse
814    grids; processes are deactivated to stay near this limit.
815 
816    Not Collective on PC
817 
818    Input Parameters:
819 +  pc - the preconditioner context
-  n - target number of equations per process on coarse grids
820 
821 
822    Options Database Key:
823 .  -pc_gamg_process_eq_limit <n>
824 
825    Level: intermediate
826 
827    Concepts: Unstructured multigrid preconditioner
828 
829 .seealso: ()
830 @*/
831 PetscErrorCode  PCGAMGSetProcEqLim(PC pc, PetscInt n)
832 {
833   PetscErrorCode ierr;
834 
835   PetscFunctionBegin;
836   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
837   ierr = PetscTryMethod(pc,"PCGAMGSetProcEqLim_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr);
838   PetscFunctionReturn(0);
839 }
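
/* Example (user code) - a sketch: target about 1000 equations per process
     ierr = PCGAMGSetProcEqLim( pc, 1000 ); CHKERRQ(ierr);
*/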
840 
841 EXTERN_C_BEGIN
842 #undef __FUNCT__
843 #define __FUNCT__ "PCGAMGSetProcEqLim_GAMG"
844 PetscErrorCode PCGAMGSetProcEqLim_GAMG(PC pc, PetscInt n)
845 {
846   /* PC_MG           *mg = (PC_MG*)pc->data; */
847   /* PC_GAMG         *pc_gamg = (PC_GAMG*)mg->innerctx; */
848 
849   PetscFunctionBegin;
850   if(n>0) s_min_eq_proc = n;
851   PetscFunctionReturn(0);
852 }
853 EXTERN_C_END
854 
855 #undef __FUNCT__
856 #define __FUNCT__ "PCGAMGAvoidRepartitioning"
857 /*@
858    PCGAMGAvoidRepartitioning - Do not repartition the coarse grids
859 
860    Collective on PC
861 
862    Input Parameters:
863 +  pc - the preconditioner context
-  n - PETSC_TRUE to keep the existing distribution of the coarse grids
864 
865 
866    Options Database Key:
867 .  -pc_gamg_avoid_repartitioning
868 
869    Level: intermediate
870 
871    Concepts: Unstructured multigrid preconditioner
872 
873 .seealso: ()
874 @*/
875 PetscErrorCode PCGAMGAvoidRepartitioning(PC pc, PetscBool n)
876 {
877   PetscErrorCode ierr;
878 
879   PetscFunctionBegin;
880   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
881   ierr = PetscTryMethod(pc,"PCGAMGAvoidRepartitioning_C",(PC,PetscBool),(pc,n));CHKERRQ(ierr);
882   PetscFunctionReturn(0);
883 }
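
/* Example (user code) - a sketch: keep the coarse grids on their original owners
     ierr = PCGAMGAvoidRepartitioning( pc, PETSC_TRUE ); CHKERRQ(ierr);
*/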
884 
885 EXTERN_C_BEGIN
886 #undef __FUNCT__
887 #define __FUNCT__ "PCGAMGAvoidRepartitioning_GAMG"
888 PetscErrorCode PCGAMGAvoidRepartitioning_GAMG(PC pc, PetscBool n)
889 {
890   /* PC_MG           *mg = (PC_MG*)pc->data; */
891   /* PC_GAMG         *pc_gamg = (PC_GAMG*)mg->innerctx; */
892 
893   PetscFunctionBegin;
894   s_avoid_repart = n;
895   PetscFunctionReturn(0);
896 }
897 EXTERN_C_END
898 
899 #undef __FUNCT__
900 #define __FUNCT__ "PCGAMGSetThreshold"
901 /*@
902    PCGAMGSetThreshold - Relative threshold to use for dropping edges in aggregation graph
903 
904    Not collective on PC
905 
906    Input Parameters:
907 +  pc - the preconditioner context
-  n - relative threshold for dropping edges in the aggregation graph
908 
909 
910    Options Database Key:
911 .  -pc_gamg_threshold <threshold>
912 
913    Level: intermediate
914 
915    Concepts: Unstructured multigrid preconditioner
916 
917 .seealso: ()
918 @*/
919 PetscErrorCode PCGAMGSetThreshold(PC pc, PetscReal n)
920 {
921   PetscErrorCode ierr;
922 
923   PetscFunctionBegin;
924   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
925   ierr = PetscTryMethod(pc,"PCGAMGSetThreshold_C",(PC,PetscReal),(pc,n));CHKERRQ(ierr);
926   PetscFunctionReturn(0);
927 }
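
/* Example (user code) - a sketch, using the default relative drop tolerance:
     ierr = PCGAMGSetThreshold( pc, 0.05 ); CHKERRQ(ierr);
*/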
928 
929 EXTERN_C_BEGIN
930 #undef __FUNCT__
931 #define __FUNCT__ "PCGAMGSetThreshold_GAMG"
932 PetscErrorCode PCGAMGSetThreshold_GAMG(PC pc, PetscReal n)
933 {
934   /* PC_MG           *mg = (PC_MG*)pc->data; */
935   /* PC_GAMG         *pc_gamg = (PC_GAMG*)mg->innerctx; */
936 
937   PetscFunctionBegin;
938   s_threshold = n;
939   PetscFunctionReturn(0);
940 }
941 EXTERN_C_END
942 
943 #undef __FUNCT__
944 #define __FUNCT__ "PCGAMGSetSolverType"
945 /*@
946    PCGAMGSetSolverType - Set the GAMG method: geometric ('geo'), plain aggregation ('pa') or smoothed aggregation ('sa').
947 
948    Collective on PC
949 
950    Input Parameters:
951 +  pc - the preconditioner context
.  str - the method: 'geo' (geometric), 'pa' (plain aggregation) or 'sa' (smoothed aggregation)
-  sz - length of str
952 
953 
954    Options Database Key:
955 .  -pc_gamg_type <geo,pa,sa>
956 
957    Level: intermediate
958 
959    Concepts: Unstructured multigrid preconditioner
960 
961 .seealso: ()
962 @*/
963 PetscErrorCode PCGAMGSetSolverType(PC pc, char str[], PetscInt sz )
964 {
965   PetscErrorCode ierr;
966 
967   PetscFunctionBegin;
968   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
969   ierr = PetscTryMethod(pc,"PCGAMGSetSolverType_C",(PC,char[],PetscInt),(pc,str,sz));
970   CHKERRQ(ierr);
971   PetscFunctionReturn(0);
972 }
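
/* Example (user code) - a sketch: select smoothed aggregation
     ierr = PCGAMGSetSolverType( pc, "sa", 2 ); CHKERRQ(ierr);
*/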
973 
974 EXTERN_C_BEGIN
975 #undef __FUNCT__
976 #define __FUNCT__ "PCGAMGSetSolverType_GAMG"
977 PetscErrorCode PCGAMGSetSolverType_GAMG(PC pc, char str[], PetscInt sz )
978 {
979   PC_MG           *mg = (PC_MG*)pc->data;
980   PC_GAMG         *pc_gamg = (PC_GAMG*)mg->innerctx;
981 
982   PetscFunctionBegin;
983   if(sz < 64) strcpy(pc_gamg->m_type,str);
984   PetscFunctionReturn(0);
985 }
986 EXTERN_C_END
987 
988 #undef __FUNCT__
989 #define __FUNCT__ "PCSetFromOptions_GAMG"
990 PetscErrorCode PCSetFromOptions_GAMG(PC pc)
991 {
992   PetscErrorCode  ierr;
993   PC_MG           *mg = (PC_MG*)pc->data;
994   PC_GAMG         *pc_gamg = (PC_GAMG*)mg->innerctx;
995   PetscBool flag;
996 
997   PetscFunctionBegin;
998   ierr = PetscOptionsHead("GAMG options"); CHKERRQ(ierr);
999   {
1000     ierr = PetscOptionsString("-pc_gamg_type",
1001                               "Solver type: plain aggregation ('pa'), smoothed aggregation ('sa') or geometric multigrid (default)",
1002                               "PCGAMGSetSolverType",
1003                               pc_gamg->m_type,
1004                               pc_gamg->m_type,
1005                               64,
1006                               &flag );
1007     CHKERRQ(ierr);
1008     if( flag && pc_gamg->m_data != 0 ) {
1009       if( (strcmp(pc_gamg->m_type,"sa")==0 && pc_gamg->m_method != 2) ||
1010           (strcmp(pc_gamg->m_type,"pa")==0 && pc_gamg->m_method != 1) ||
1011           (strcmp(pc_gamg->m_type,"geo")==0 && pc_gamg->m_method != 0) ) {
1012         SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONG, "Cannot change the GAMG method with PCSetFromOptions after PCSetCoordinates has created the data");
1013       }
1014     }
1015 
1016     if (strcmp(pc_gamg->m_type,"sa") == 0) pc_gamg->m_method = 2;
1017     else if (strcmp(pc_gamg->m_type,"pa") == 0) pc_gamg->m_method = 1;
1018     else pc_gamg->m_method = 0;
1019 
1020     /* common (static) variable */
1021     ierr = PetscOptionsBool("-pc_gamg_avoid_repartitioning",
1022                             "Do not repartition coarse grids (false)",
1023                             "PCGAMGAvoidRepartitioning",
1024                             s_avoid_repart,
1025                             &s_avoid_repart,
1026                             &flag);
1027     CHKERRQ(ierr);
1028 
1029     /* common (static) variable */
1030     ierr = PetscOptionsInt("-pc_gamg_process_eq_limit",
1031                            "Limit (goal) on number of equations per process on coarse grids",
1032                            "PCGAMGSetProcEqLim",
1033                            s_min_eq_proc,
1034                            &s_min_eq_proc,
1035                            &flag );
1036     CHKERRQ(ierr);
1037 
1038     /* common (static) variable */
1039     ierr = PetscOptionsReal("-pc_gamg_threshold",
1040                             "Relative threshold to use for dropping edges in aggregation graph",
1041                             "PCGAMGSetThreshold",
1042                             s_threshold,
1043                             &s_threshold,
1044                             &flag );
1045     CHKERRQ(ierr);
1046   }
1047   ierr = PetscOptionsTail();CHKERRQ(ierr);
1048 
1049   PetscFunctionReturn(0);
1050 }
1051 
1052 /* -------------------------------------------------------------------------- */
1053 /*
1054  PCCreate_GAMG - Creates a GAMG preconditioner context, PC_GAMG
1055 
1056    Input Parameter:
1057 .  pc - the preconditioner context
1058 
1059    Application Interface Routine: PCCreate()
1060 
1061   */
1062  /* MC
1063      PCGAMG - Use algebraic multigrid preconditioning. This preconditioner requires you to provide
1064        the fine-grid discretization matrix and the coordinates on the fine grid.
1065 
1066    Options Database Key:
1067    Multigrid options(inherited)
1068 +  -pc_mg_cycles <1>: 1 for V cycle, 2 for W-cycle (PCMGSetCycles)
1069 .  -pc_mg_smoothup <1>: Number of post-smoothing steps (PCMGSetNumberSmoothUp)
1070 .  -pc_mg_smoothdown <1>: Number of pre-smoothing steps (PCMGSetNumberSmoothDown)
1071 -  -pc_mg_type <multiplicative>: (one of) additive multiplicative full cascade kascade
1072    GAMG options:
+  -pc_gamg_type <geo>: 'geo' (geometric), 'pa' (plain aggregation) or 'sa' (smoothed aggregation)
.  -pc_gamg_process_eq_limit <600>: target number of equations per process on coarse grids
.  -pc_gamg_avoid_repartitioning <false>: do not repartition coarse grids
-  -pc_gamg_threshold <0.05>: relative threshold for dropping edges in the aggregation graph
1073 
1074    Level: intermediate
1075   Concepts: multigrid
1076 
1077 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC, PCMGType,
1078            PCMGSetLevels(), PCMGGetLevels(), PCMGSetType(), PCMGSetCycles(), PCMGSetNumberSmoothDown(),
1079            PCMGSetNumberSmoothUp(), PCMGGetCoarseSolve(), PCMGSetResidual(), PCMGSetInterpolation(),
1080            PCMGSetRestriction(), PCMGGetSmoother(), PCMGGetSmootherUp(), PCMGGetSmootherDown(),
1081            PCMGSetCyclesOnLevel(), PCMGSetRhs(), PCMGSetX(), PCMGSetR()
1082 M */
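
/*
   Example (user code) - a minimal sketch of creating and configuring the PC:

     PC pc;
     ierr = PCCreate( PETSC_COMM_WORLD, &pc ); CHKERRQ(ierr);
     ierr = PCSetType( pc, PCGAMG );           CHKERRQ(ierr);
     ierr = PCSetFromOptions( pc );            CHKERRQ(ierr);
*/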
1083 
1084 EXTERN_C_BEGIN
1085 #undef __FUNCT__
1086 #define __FUNCT__ "PCCreate_GAMG"
1087 PetscErrorCode  PCCreate_GAMG(PC pc)
1088 {
1089   PetscErrorCode  ierr;
1090   PC_GAMG         *pc_gamg;
1091   PC_MG           *mg;
1092 #if defined PETSC_USE_LOG
1093   PetscClassId     cookie;
1094   static int count = 0;
1095 #endif
1096 
1097   PetscFunctionBegin;
1098   /* PCGAMG is an inherited class of PCMG. Initialize pc as PCMG */
1099   ierr = PCSetType(pc,PCMG);CHKERRQ(ierr); /* calls PCCreate_MG() and MGCreate_Private() */
1100   ierr = PetscObjectChangeTypeName((PetscObject)pc,PCGAMG);CHKERRQ(ierr);
1101 
1102   /* create a supporting struct and attach it to pc */
1103   ierr = PetscNewLog(pc,PC_GAMG,&pc_gamg);CHKERRQ(ierr);
1104   pc_gamg->m_data_sz = 0; pc_gamg->m_data = 0; pc_gamg->m_count = 0;
1105   mg = (PC_MG*)pc->data;
1106   mg->innerctx = pc_gamg;
1107 
1108   pc_gamg->m_Nlevels    = -1;
1109 
1110   /* overwrite the pointers of PCMG by the functions of PCGAMG */
1111   pc->ops->setfromoptions = PCSetFromOptions_GAMG;
1112   pc->ops->setup          = PCSetUp_GAMG;
1113   pc->ops->reset          = PCReset_GAMG;
1114   pc->ops->destroy        = PCDestroy_GAMG;
1115 
1116   ierr = PetscObjectComposeFunctionDynamic( (PetscObject)pc,
1117 					    "PCSetCoordinates_C",
1118 					    "PCSetCoordinates_GAMG",
1119 					    PCSetCoordinates_GAMG);
1120   CHKERRQ(ierr);
1121 
1122   ierr = PetscObjectComposeFunctionDynamic( (PetscObject)pc,
1123 					    "PCGAMGSetProcEqLim_C",
1124 					    "PCGAMGSetProcEqLim_GAMG",
1125 					    PCGAMGSetProcEqLim_GAMG);
1126   CHKERRQ(ierr);
1127 
1128   ierr = PetscObjectComposeFunctionDynamic( (PetscObject)pc,
1129 					    "PCGAMGAvoidRepartitioning_C",
1130 					    "PCGAMGAvoidRepartitioning_GAMG",
1131 					    PCGAMGAvoidRepartitioning_GAMG);
1132   CHKERRQ(ierr);
1133 
1134   ierr = PetscObjectComposeFunctionDynamic( (PetscObject)pc,
1135 					    "PCGAMGSetThreshold_C",
1136 					    "PCGAMGSetThreshold_GAMG",
1137 					    PCGAMGSetThreshold_GAMG);
1138   CHKERRQ(ierr);
1139 
1140   ierr = PetscObjectComposeFunctionDynamic( (PetscObject)pc,
1141 					    "PCGAMGSetSolverType_C",
1142 					    "PCGAMGSetSolverType_GAMG",
1143 					    PCGAMGSetSolverType_GAMG);
1144   CHKERRQ(ierr);
1145 
1146 #if defined PETSC_USE_LOG
1147   if( count++ == 0 ) {
1148     PetscClassIdRegister("GAMG Setup",&cookie);
1149     PetscLogEventRegister("GAMG: createProl", cookie, &gamg_setup_events[SET1]);
1150     PetscLogEventRegister("  Graph", cookie, &gamg_setup_events[GRAPH]);
1151     PetscLogEventRegister("    G.Mat", cookie, &gamg_setup_events[GRAPH_MAT]);
1152     PetscLogEventRegister("    G.Filter", cookie, &gamg_setup_events[GRAPH_FILTER]);
1153     PetscLogEventRegister("    G.Square", cookie, &gamg_setup_events[GRAPH_SQR]);
1154     PetscLogEventRegister("  MIS/Agg", cookie, &gamg_setup_events[SET4]);
1155     PetscLogEventRegister("  geo: growSupp", cookie, &gamg_setup_events[SET5]);
1156     PetscLogEventRegister("  geo: triangle", cookie, &gamg_setup_events[SET6]);
1157     PetscLogEventRegister("    search&set", cookie, &gamg_setup_events[FIND_V]);
1158     PetscLogEventRegister("  SA: init", cookie, &gamg_setup_events[SET7]);
1159     /* PetscLogEventRegister("  SA: frmProl0", cookie, &gamg_setup_events[SET8]); */
1160     PetscLogEventRegister("  SA: smooth", cookie, &gamg_setup_events[SET9]);
1161     PetscLogEventRegister("GAMG: partLevel", cookie, &gamg_setup_events[SET2]);
1162     PetscLogEventRegister("  PL repartition", cookie, &gamg_setup_events[SET12]);
1163     /* PetscLogEventRegister(" PL move data", cookie, &gamg_setup_events[SET13]); */
1164     /* PetscLogEventRegister("GAMG: fix", cookie, &gamg_setup_events[SET10]); */
1165     /* PetscLogEventRegister("GAMG: set levels", cookie, &gamg_setup_events[SET11]); */
1166 
1167     /* create timer stages */
1168 #if defined GAMG_STAGES
1169     {
1170       char str[32];
1171       PetscInt lidx;
1172       sprintf(str,"MG Level %d (finest)",0);
1173       PetscLogStageRegister(str, &gamg_stages[0]);
1174       for (lidx=1;lidx<9;lidx++){
1175         sprintf(str,"MG Level %d",lidx);
1176         PetscLogStageRegister(str, &gamg_stages[lidx]);
1177       }
1178     }
1179 #endif
1180   }
1181 #endif
1182   PetscFunctionReturn(0);
1183 }
1184 EXTERN_C_END
1185