xref: /petsc/src/ksp/pc/impls/gamg/gamg.c (revision eabe889f41eab0385d57eba42f0f78f2ecc7ced4)
1 /*
2  GAMG geometric-algebraic multigrid PC - Mark Adams 2011
3  */
4 #include "petsc-private/matimpl.h"
5 #include <../src/ksp/pc/impls/gamg/gamg.h>           /*I "petscpc.h" I*/
6 #include <petsc-private/kspimpl.h>
7 
8 #if defined PETSC_GAMG_USE_LOG
9 PetscLogEvent petsc_gamg_setup_events[NUM_SET];
10 #endif
11 
12 #if defined PETSC_USE_LOG
13 PetscLogEvent PC_GAMGGgraph_AGG;
14 PetscLogEvent PC_GAMGGgraph_GEO;
15 PetscLogEvent PC_GAMGCoarsen_AGG;
16 PetscLogEvent PC_GAMGCoarsen_GEO;
17 PetscLogEvent PC_GAMGProlongator_AGG;
18 PetscLogEvent PC_GAMGProlongator_GEO;
19 PetscLogEvent PC_GAMGOptprol_AGG;
20 #endif
21 
22 #define GAMG_MAXLEVELS 30
23 
24 /* #define GAMG_STAGES */
25 #if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES)
26 static PetscLogStage gamg_stages[GAMG_MAXLEVELS];
27 #endif
28 
29 static PetscFList GAMGList = 0;
30 
31 /* ----------------------------------------------------------------------------- */
32 #undef __FUNCT__
33 #define __FUNCT__ "PCReset_GAMG"
34 PetscErrorCode PCReset_GAMG(PC pc)
35 {
36   PetscErrorCode  ierr;
37   PC_MG           *mg = (PC_MG*)pc->data;
38   PC_GAMG         *pc_gamg = (PC_GAMG*)mg->innerctx;
39 
40   PetscFunctionBegin;
41   if( pc_gamg->data != 0 ) { /* this should not happen, cleaned up in SetUp */
42     ierr = PetscFree(pc_gamg->data); CHKERRQ(ierr);
43   }
44   pc_gamg->data = 0; pc_gamg->data_sz = 0;
45   PetscFunctionReturn(0);
46 }
47 
48 /* -------------------------------------------------------------------------- */
49 /*
50    createLevel: create the coarse operator with RAP (Galerkin product); repartition
51      and/or reduce the number of active processes if too few equations remain per process.
52 
53    Input Parameters:
54    . pc - the preconditioner context
55    . Amat_fine - matrix on this fine (k) level
56    . cbs - coarse block size
57    . isLast - PETSC_TRUE if this is the coarsest (last) level
58    Input/Output Parameters:
59    . a_P_inout - prolongation operator to the next level (k-1)
60    . a_nactive_proc - number of active processes
61    Output Parameter:
62    . a_Amat_crs - coarse matrix that is created (k-1)
63 */
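
/*
   Conceptually, the routine below forms the level-(k-1) Galerkin operator

       A_coarse = P^T * A * P        (MatPtAP)

   and then, if too few equations per process would remain, moves A_coarse and the
   columns of P onto a smaller set of processes, either by a true repartitioning
   (MatPartitioning) or by simply folding groups of 'rfactor' consecutive ranks together.
   (This comment is a summary sketch of the code that follows, not additional behavior.)
*/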
64 
65 #undef __FUNCT__
66 #define __FUNCT__ "createLevel"
67 static PetscErrorCode createLevel( const PC pc,
68                             const Mat Amat_fine,
69                             const PetscInt cbs,
70                             const PetscBool isLast,
71                             Mat *a_P_inout,
72                             PetscMPIInt *a_nactive_proc,
73                             Mat *a_Amat_crs
74                             )
75 {
76   PC_MG           *mg = (PC_MG*)pc->data;
77   PC_GAMG         *pc_gamg = (PC_GAMG*)mg->innerctx;
78   const PetscBool  repart = pc_gamg->repart;
79   const PetscInt   min_eq_proc = pc_gamg->min_eq_proc, coarse_max = pc_gamg->coarse_eq_limit;
80   PetscErrorCode   ierr;
81   Mat              Cmat,Pnew,Pold=*a_P_inout;
82   IS               new_indices,isnum;
83   MPI_Comm         wcomm = ((PetscObject)Amat_fine)->comm;
84   PetscMPIInt      mype,npe,new_npe,nactive = *a_nactive_proc;
85   PetscInt         neq,NN,Istart,Iend,Istart0,Iend0,ncrs_new,ncrs0,rfactor;
86 
87   PetscFunctionBegin;
88   ierr = MPI_Comm_rank( wcomm, &mype ); CHKERRQ(ierr);
89   ierr = MPI_Comm_size( wcomm, &npe );  CHKERRQ(ierr);
90 
91   /* RAP */
92 #ifdef USE_R
93   /* make R with brute force for now */
94   ierr = MatTranspose( Pold, MAT_INITIAL_MATRIX, &Pnew ); CHKERRQ(ierr);
95   ierr = MatDestroy( &Pold );  CHKERRQ(ierr);
96   ierr = MatRARt( Amat_fine, Pnew, MAT_INITIAL_MATRIX, 2.0, &Cmat ); CHKERRQ(ierr);
97   Pold = Pnew;
98 #else
99   ierr = MatPtAP( Amat_fine, Pold, MAT_INITIAL_MATRIX, 2.0, &Cmat ); CHKERRQ(ierr);
100 #endif
101   ierr = MatSetBlockSize( Cmat, cbs );      CHKERRQ(ierr);
102   ierr = MatGetOwnershipRange( Cmat, &Istart0, &Iend0 ); CHKERRQ(ierr);
103   ncrs0 = (Iend0-Istart0)/cbs; assert((Iend0-Istart0)%cbs == 0);
104 
105   /* get number of PEs to make active, reduce */
106   ierr = MatGetSize( Cmat, &neq, &NN );  CHKERRQ(ierr);
107   new_npe = (PetscMPIInt)((float)neq/(float)min_eq_proc + 0.5); /* hardwire min. number of eq/proc */
108   if( new_npe == 0 || neq < coarse_max ) new_npe = 1;
109   else if (new_npe >= nactive ) new_npe = nactive; /* no change, rare */
110   if( isLast ) new_npe = 1;
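  /* e.g. with the defaults min_eq_proc=100 and coarse_eq_limit=800 (set in PCCreate_GAMG):
     neq=45000 gives new_npe=450 (then capped at nactive), while neq=450 gives new_npe=1
     because 450 < coarse_eq_limit (illustrative numbers only) */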
111 
112   if( !repart && new_npe==nactive ) {
113     *a_Amat_crs = Cmat; /* output - no repartitioning or reduction */
114   }
115   else {
116     /* Repartition Cmat_{k} and move columns of P^{k}_{k-1} and coordinates accordingly */
117     const PetscInt *idx,ndata_rows=pc_gamg->data_cell_rows,ndata_cols=pc_gamg->data_cell_cols,data_sz=ndata_rows*ndata_cols;
118     const PetscInt  stride0=ncrs0*pc_gamg->data_cell_rows;
119     PetscInt        *counts,is_sz,*newproc_idx,ii,jj,kk,strideNew,*tidx,inpe,targetPE;
120     IS              isnewproc;
121     VecScatter      vecscat;
122     PetscScalar    *array;
123     Vec             src_crd, dest_crd;
124     IS              isscat;
125 
126     ierr = PetscMalloc( npe*sizeof(PetscInt), &counts ); CHKERRQ(ierr);
127 #if defined PETSC_GAMG_USE_LOG
128       ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET12],0,0,0,0);CHKERRQ(ierr);
129 #endif
130     if( repart ) {
131       /* create sub communicator  */
132       Mat             adj;
133 
134       if (pc_gamg->verbose) PetscPrintf(wcomm,"\t[%d]%s repartition: npe (active): %d --> %d, neq = %d\n",mype,__FUNCT__,*a_nactive_proc,new_npe,neq);
135 
136       /* MatPartitioningApply calls MatConvert, which is collective */
137       if( cbs == 1 ) {
138 	ierr = MatConvert( Cmat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj );   CHKERRQ(ierr);
139       }
140       else{
141 	/* make a scalar matrix to partition */
142 	Mat tMat;
143 	PetscInt ncols,jj,Ii;
144 	const PetscScalar *vals;
145 	const PetscInt *idx;
146 	PetscInt *d_nnz, *o_nnz;
147 	static PetscInt llev = 0;
148 
149 	ierr = PetscMalloc( ncrs0*sizeof(PetscInt), &d_nnz ); CHKERRQ(ierr);
150 	ierr = PetscMalloc( ncrs0*sizeof(PetscInt), &o_nnz ); CHKERRQ(ierr);
151 	for ( Ii = Istart0, jj = 0 ; Ii < Iend0 ; Ii += cbs, jj++ ) {
152 	  ierr = MatGetRow(Cmat,Ii,&ncols,0,0); CHKERRQ(ierr);
153 	  d_nnz[jj] = ncols/cbs;
154 	  o_nnz[jj] = ncols/cbs;
155 	  ierr = MatRestoreRow(Cmat,Ii,&ncols,0,0); CHKERRQ(ierr);
156 	  if( d_nnz[jj] > ncrs0 ) d_nnz[jj] = ncrs0;
157 	  if( o_nnz[jj] > (neq/cbs-ncrs0) ) o_nnz[jj] = neq/cbs-ncrs0;
158 	}
159 
160 	ierr = MatCreateAIJ( wcomm, ncrs0, ncrs0,
161                              PETSC_DETERMINE, PETSC_DETERMINE,
162                              0, d_nnz, 0, o_nnz,
163                              &tMat );
164 	CHKERRQ(ierr);
165 	ierr = PetscFree( d_nnz ); CHKERRQ(ierr);
166 	ierr = PetscFree( o_nnz ); CHKERRQ(ierr);
167 
168 	for ( ii = Istart0; ii < Iend0; ii++ ) {
169 	  PetscInt dest_row = ii/cbs;
170 	  ierr = MatGetRow(Cmat,ii,&ncols,&idx,&vals); CHKERRQ(ierr);
171 	  for( jj = 0 ; jj < ncols ; jj++ ){
172 	    PetscInt dest_col = idx[jj]/cbs;
173 	    PetscScalar v = 1.0;
174 	    ierr = MatSetValues(tMat,1,&dest_row,1,&dest_col,&v,ADD_VALUES); CHKERRQ(ierr);
175 	  }
176 	  ierr = MatRestoreRow(Cmat,ii,&ncols,&idx,&vals); CHKERRQ(ierr);
177 	}
178 	ierr = MatAssemblyBegin(tMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
179 	ierr = MatAssemblyEnd(tMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
180 
181 	if( llev++ == -1 ) {
182 	  PetscViewer viewer; char fname[32];
183 	  ierr = PetscSNPrintf(fname,sizeof fname,"part_mat_%D.mat",llev);CHKERRQ(ierr);
184 	  PetscViewerBinaryOpen(wcomm,fname,FILE_MODE_WRITE,&viewer);
185 	  ierr = MatView( tMat, viewer ); CHKERRQ(ierr);
186 	  ierr = PetscViewerDestroy( &viewer );
187 	}
188 
189 	ierr = MatConvert( tMat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj );   CHKERRQ(ierr);
190 
191 	ierr = MatDestroy( &tMat );  CHKERRQ(ierr);
192       }
193 
194       { /* partition: get newproc_idx, set is_sz */
195 	char prefix[256];
196 	const char *pcpre;
197 	const PetscInt *is_idx;
198 	MatPartitioning  mpart;
199 	IS proc_is;
200 
201 	ierr = MatPartitioningCreate( wcomm, &mpart ); CHKERRQ(ierr);
202 	ierr = MatPartitioningSetAdjacency( mpart, adj ); CHKERRQ(ierr);
203 	ierr = PCGetOptionsPrefix( pc, &pcpre );CHKERRQ(ierr);
204 	ierr = PetscSNPrintf(prefix,sizeof prefix,"%spc_gamg_",pcpre?pcpre:"");CHKERRQ(ierr);
205 	ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
206 	ierr = MatPartitioningSetFromOptions( mpart );    CHKERRQ(ierr);
207 	ierr = MatPartitioningSetNParts( mpart, new_npe );CHKERRQ(ierr);
208 	ierr = MatPartitioningApply( mpart, &proc_is ); CHKERRQ(ierr);
209 	ierr = MatPartitioningDestroy( &mpart );          CHKERRQ(ierr);
210 
211 	/* collect IS info */
212 	ierr = ISGetLocalSize( proc_is, &is_sz );       CHKERRQ(ierr);
213 	ierr = PetscMalloc( cbs*is_sz*sizeof(PetscInt), &newproc_idx ); CHKERRQ(ierr);
214 	ierr = ISGetIndices( proc_is, &is_idx );        CHKERRQ(ierr);
215 	NN = 1; /* bring to "front" of machine */
216 	/*NN = npe/new_npe;*/ /* spread partitioning across machine */
217 	for( kk = jj = 0 ; kk < is_sz ; kk++ ){
218 	  for( ii = 0 ; ii < cbs ; ii++, jj++ ){
219 	    newproc_idx[jj] = is_idx[kk] * NN; /* distribution */
220 	  }
221 	}
222 	ierr = ISRestoreIndices( proc_is, &is_idx );     CHKERRQ(ierr);
223 	ierr = ISDestroy( &proc_is );                    CHKERRQ(ierr);
224 
225 	is_sz *= cbs;
226       }
227       ierr = MatDestroy( &adj );                       CHKERRQ(ierr);
228 
229       ierr = ISCreateGeneral( wcomm, is_sz, newproc_idx, PETSC_COPY_VALUES, &isnewproc );
230       CHKERRQ(ierr);
231       if( newproc_idx != 0 ) {
232 	ierr = PetscFree( newproc_idx );  CHKERRQ(ierr);
233       }
234     }
235     else { /* simple aggregation of parts */
236       /* find factor */
237       if( new_npe == 1 ) rfactor = npe; /* easy */
238       else {
239 	PetscReal best_fact = 0.;
240 	jj = -1;
241 	for( kk = 1 ; kk <= npe ; kk++ ){
242 	  if( npe%kk==0 ) { /* a candidate */
243 	    PetscReal nactpe = (PetscReal)npe/(PetscReal)kk, fact = nactpe/(PetscReal)new_npe;
244 	    if(fact > 1.0) fact = 1./fact; /* keep fact < 1 */
245 	    if( fact > best_fact ) {
246 	      best_fact = fact; jj = kk;
247 	    }
248 	  }
249 	}
250 	if(jj!=-1) rfactor = jj;
251 	else rfactor = 1; /* prime? */
252       }
253       new_npe = npe/rfactor;
254 
255       if( new_npe==nactive ) {
256 	*a_Amat_crs = Cmat; /* output - no repartitioning or reduction */
257 	ierr = PetscFree( counts );  CHKERRQ(ierr);
258 	*a_nactive_proc = new_npe; /* output */
259 	if (pc_gamg->verbose) PetscPrintf(wcomm,"\t[%d]%s aggregate processors noop: new_npe=%d, neq=%d\n",mype,__FUNCT__,new_npe,neq);
260 	PetscFunctionReturn(0);
261       }
262 
263       /* reduce - isnewproc */
264       targetPE = mype/rfactor;
265 
266       if (pc_gamg->verbose) PetscPrintf(wcomm,"\t[%d]%s aggregate processors: npe: %d --> %d, neq=%d\n",mype,__FUNCT__,*a_nactive_proc,new_npe,neq);
267       is_sz = ncrs0*cbs;
268       ierr = ISCreateStride( wcomm, is_sz, targetPE, 0, &isnewproc );
269       CHKERRQ(ierr);
270     }
271 
272     /*
273       Create an index set from the isnewproc index set to indicate the mapping TO
274     */
275     ierr = ISPartitioningToNumbering( isnewproc, &isnum ); CHKERRQ(ierr);
276     /*
277       Determine how many elements are assigned to each processor
278     */
279     inpe = npe;
280     ierr = ISPartitioningCount( isnewproc, inpe, counts ); CHKERRQ(ierr);
281     ierr = ISDestroy( &isnewproc );                       CHKERRQ(ierr);
282     ncrs_new = counts[mype]/cbs;
283     strideNew = ncrs_new*ndata_rows;
284 #if defined PETSC_GAMG_USE_LOG
285       ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET12],0,0,0,0);   CHKERRQ(ierr);
286 #endif
287     /* Create a vector to contain the newly ordered element information */
288     ierr = VecCreate( wcomm, &dest_crd ); CHKERRQ(ierr);
289     ierr = VecSetSizes( dest_crd, data_sz*ncrs_new, PETSC_DECIDE ); CHKERRQ(ierr);
290     ierr = VecSetFromOptions( dest_crd ); CHKERRQ(ierr); /* this is needed! */
291     /*
292      There are 'ndata_rows*ndata_cols' data items per node (one can think of these vectors as
293      having that block size).  Note, ISs are expanded into equation space by 'cbs'.
294      */
295     ierr = PetscMalloc( (ncrs0*data_sz)*sizeof(PetscInt), &tidx ); CHKERRQ(ierr);
296     ierr = ISGetIndices( isnum, &idx ); CHKERRQ(ierr);
297     for(ii=0,jj=0; ii<ncrs0 ; ii++) {
298       PetscInt id = idx[ii*cbs]/cbs; /* get node back */
299       for( kk=0; kk<data_sz ; kk++, jj++) tidx[jj] = id*data_sz + kk;
300     }
301     ierr = ISRestoreIndices( isnum, &idx ); CHKERRQ(ierr);
302     ierr = ISCreateGeneral( wcomm, data_sz*ncrs0, tidx, PETSC_COPY_VALUES, &isscat );
303     CHKERRQ(ierr);
304     ierr = PetscFree( tidx );  CHKERRQ(ierr);
305     /*
306      Create a vector to contain the original vertex information for each element
307      */
308     ierr = VecCreateSeq( PETSC_COMM_SELF, data_sz*ncrs0, &src_crd ); CHKERRQ(ierr);
309     for( jj=0; jj<ndata_cols ; jj++ ) {
310       for( ii=0 ; ii<ncrs0 ; ii++) {
311 	for( kk=0; kk<ndata_rows ; kk++ ) {
312 	  PetscInt ix = ii*ndata_rows + kk + jj*stride0, jx = ii*data_sz + kk*ndata_cols + jj;
313           PetscScalar tt = (PetscScalar)pc_gamg->data[ix];
314 	  ierr = VecSetValues( src_crd, 1, &jx, &tt, INSERT_VALUES );  CHKERRQ(ierr);
315 	}
316       }
317     }
318     ierr = VecAssemblyBegin(src_crd); CHKERRQ(ierr);
319     ierr = VecAssemblyEnd(src_crd); CHKERRQ(ierr);
320     /*
321       Scatter the element vertex information (still in the original vertex ordering)
322       to the correct processor
323     */
324     ierr = VecScatterCreate( src_crd, PETSC_NULL, dest_crd, isscat, &vecscat);
325     CHKERRQ(ierr);
326     ierr = ISDestroy( &isscat );       CHKERRQ(ierr);
327     ierr = VecScatterBegin(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
328     ierr = VecScatterEnd(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
329     ierr = VecScatterDestroy( &vecscat );       CHKERRQ(ierr);
330     ierr = VecDestroy( &src_crd );       CHKERRQ(ierr);
331     /*
332       Put the element vertex data into a new allocation of pc_gamg->data
333     */
334     ierr = PetscFree( pc_gamg->data );    CHKERRQ(ierr);
335     ierr = PetscMalloc( data_sz*ncrs_new*sizeof(PetscReal), &pc_gamg->data );    CHKERRQ(ierr);
336     pc_gamg->data_sz = data_sz*ncrs_new;
337     ierr = VecGetArray( dest_crd, &array );    CHKERRQ(ierr);
338     for( jj=0; jj<ndata_cols ; jj++ ) {
339       for( ii=0 ; ii<ncrs_new ; ii++) {
340 	for( kk=0; kk<ndata_rows ; kk++ ) {
341 	  PetscInt ix = ii*ndata_rows + kk + jj*strideNew, jx = ii*data_sz + kk*ndata_cols + jj;
342 	  pc_gamg->data[ix] = PetscRealPart(array[jx]);
343 	  array[jx] = 1.e300;
344 	}
345       }
346     }
347     ierr = VecRestoreArray( dest_crd, &array );    CHKERRQ(ierr);
348     ierr = VecDestroy( &dest_crd );    CHKERRQ(ierr);
349 #if defined PETSC_GAMG_USE_LOG
350     ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET13],0,0,0,0);CHKERRQ(ierr);
351 #endif
352     /*
353       Invert for MatGetSubMatrix
354     */
355     ierr = ISInvertPermutation( isnum, ncrs_new*cbs, &new_indices ); CHKERRQ(ierr);
356     ierr = ISSort( new_indices ); CHKERRQ(ierr); /* is this needed? */
357     ierr = ISDestroy( &isnum ); CHKERRQ(ierr);
358 #if defined PETSC_GAMG_USE_LOG
359     ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET13],0,0,0,0);CHKERRQ(ierr);
360     ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET14],0,0,0,0);CHKERRQ(ierr);
361 #endif
362     /* A_crs output */
363     ierr = MatGetSubMatrix( Cmat, new_indices, new_indices, MAT_INITIAL_MATRIX, a_Amat_crs );
364     CHKERRQ(ierr);
365 
366     ierr = MatDestroy( &Cmat ); CHKERRQ(ierr);
367     Cmat = *a_Amat_crs; /* output */
368     ierr = MatSetBlockSize( Cmat, cbs );      CHKERRQ(ierr);
369 #if defined PETSC_GAMG_USE_LOG
370     ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET14],0,0,0,0);CHKERRQ(ierr);
371 #endif
372     /* prolongator */
373     ierr = MatGetOwnershipRange( Pold, &Istart, &Iend );    CHKERRQ(ierr);
374     {
375       IS findices;
376 #if defined PETSC_GAMG_USE_LOG
377       ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET15],0,0,0,0);CHKERRQ(ierr);
378 #endif
379       ierr = ISCreateStride(wcomm,Iend-Istart,Istart,1,&findices);   CHKERRQ(ierr);
380 #ifdef USE_R
381       ierr = MatGetSubMatrix( Pold, new_indices, findices, MAT_INITIAL_MATRIX, &Pnew );
382 #else
383       ierr = MatGetSubMatrix( Pold, findices, new_indices, MAT_INITIAL_MATRIX, &Pnew );
384 #endif
385       CHKERRQ(ierr);
386       ierr = ISDestroy( &findices ); CHKERRQ(ierr);
387 #if defined PETSC_GAMG_USE_LOG
388       ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET15],0,0,0,0);CHKERRQ(ierr);
389 #endif
390     }
391     ierr = MatDestroy( a_P_inout ); CHKERRQ(ierr);
392     *a_P_inout = Pnew; /* output - repartitioned */
393     ierr = ISDestroy( &new_indices ); CHKERRQ(ierr);
394     ierr = PetscFree( counts );  CHKERRQ(ierr);
395   }
396 
397   *a_nactive_proc = new_npe; /* output */
398 
399   if( !PETSC_TRUE ) {
400     PetscViewer viewer; char fname[32]; static int llev=0; Cmat = *a_Amat_crs;
401     if(llev==0) {
402       sprintf(fname,"Cmat_%d.m",llev++);
403       PetscViewerASCIIOpen(wcomm,fname,&viewer);
404       ierr = PetscViewerSetFormat( viewer, PETSC_VIEWER_ASCII_MATLAB);  CHKERRQ(ierr);
405       ierr = MatView(Amat_fine, viewer ); CHKERRQ(ierr);
406       ierr = PetscViewerDestroy( &viewer );
407     }
408     sprintf(fname,"Cmat_%d.m",llev++);
409     PetscViewerASCIIOpen(wcomm,fname,&viewer);
410     ierr = PetscViewerSetFormat( viewer, PETSC_VIEWER_ASCII_MATLAB);  CHKERRQ(ierr);
411     ierr = MatView(Cmat, viewer ); CHKERRQ(ierr);
412     ierr = PetscViewerDestroy( &viewer );
413   }
414 
415   PetscFunctionReturn(0);
416 }
417 
418 /* -------------------------------------------------------------------------- */
419 /*
420    PCSetUp_GAMG - Prepares for the use of the GAMG preconditioner
421                     by setting data structures and options.
422 
423    Input Parameter:
424 .  pc - the preconditioner context
425 
426    Application Interface Routine: PCSetUp()
427 
428    Notes:
429    The interface routine PCSetUp() is not usually called directly by
430    the user, but instead is called by PCApply() if necessary.
431 */
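
/*
   A minimal usage sketch (illustrative; assumes a parallel AIJ matrix A and vectors b,x
   already exist).  PCSetUp_GAMG() is reached through PCSetUp() when the solve is set up:

     KSP ksp;  PC pc;
     ierr = KSPCreate(PETSC_COMM_WORLD,&ksp);CHKERRQ(ierr);
     ierr = KSPSetOperators(ksp,A,A,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
     ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
     ierr = PCSetType(pc,PCGAMG);CHKERRQ(ierr);
     ierr = PCGAMGSetType(pc,GAMGAGG);CHKERRQ(ierr);
     ierr = KSPSetFromOptions(ksp);CHKERRQ(ierr);
     ierr = KSPSolve(ksp,b,x);CHKERRQ(ierr);
*/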
432 #undef __FUNCT__
433 #define __FUNCT__ "PCSetUp_GAMG"
434 PetscErrorCode PCSetUp_GAMG( PC pc )
435 {
436   PetscErrorCode  ierr;
437   PC_MG           *mg = (PC_MG*)pc->data;
438   PC_GAMG         *pc_gamg = (PC_GAMG*)mg->innerctx;
439   Mat              Pmat = pc->pmat;
440   PetscInt         fine_level,level,level1,M,N,bs,nloc,lidx,Istart,Iend,nASMBlocksArr[GAMG_MAXLEVELS];
441   MPI_Comm         wcomm = ((PetscObject)pc)->comm;
442   PetscMPIInt      mype,npe,nactivepe;
443   Mat              Aarr[GAMG_MAXLEVELS], Parr[GAMG_MAXLEVELS];
444   PetscReal        emaxs[GAMG_MAXLEVELS];
445   IS              *ASMLocalIDsArr[GAMG_MAXLEVELS],removedEqs[GAMG_MAXLEVELS];
446   PetscInt         level_bs[GAMG_MAXLEVELS];
447   PetscLogDouble   nnz0=0,nnztot=0;
448   MatInfo          info;
449 
450   PetscFunctionBegin;
451   ierr = MPI_Comm_rank(wcomm,&mype);CHKERRQ(ierr);
452   ierr = MPI_Comm_size(wcomm,&npe);CHKERRQ(ierr);
453   if( pc_gamg->setup_count++ > 0 ) {
454     PC_MG_Levels   **mglevels = mg->levels;
455     /* just do Galerkin grids */
456     Mat B,dA,dB;
457     assert(pc->setupcalled);
458 
459     if( pc_gamg->Nlevels > 1 ) {
460       /* currently only handle case where mat and pmat are the same on coarser levels */
461       ierr = KSPGetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,&dA,&dB,PETSC_NULL);CHKERRQ(ierr);
462       /* (re)set to get dirty flag */
463       ierr = KSPSetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,dA,dB,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
464 
465       for (level=pc_gamg->Nlevels-2; level>-1; level--) {
466         /* the first time through the matrix structure has changed from repartitioning */
467         if( pc_gamg->setup_count==2 /*&& (pc_gamg->repart || level==0)*/) {
468           ierr = MatPtAP(dB,mglevels[level+1]->interpolate,MAT_INITIAL_MATRIX,1.0,&B);CHKERRQ(ierr);
469           ierr = MatDestroy(&mglevels[level]->A);CHKERRQ(ierr);
470           mglevels[level]->A = B;
471         }
472         else {
473           ierr = KSPGetOperators(mglevels[level]->smoothd,PETSC_NULL,&B,PETSC_NULL);CHKERRQ(ierr);
474           ierr = MatPtAP(dB,mglevels[level+1]->interpolate,MAT_REUSE_MATRIX,1.0,&B);CHKERRQ(ierr);
475         }
476         ierr = KSPSetOperators(mglevels[level]->smoothd,B,B,SAME_NONZERO_PATTERN); CHKERRQ(ierr);
477         dB = B;
478       }
479     }
480 
481     ierr = PCSetUp_MG( pc );CHKERRQ( ierr );
482 
483     /* PCSetUp_MG seems to insist on setting this to GMRES */
484     ierr = KSPSetType( mglevels[0]->smoothd, KSPPREONLY ); CHKERRQ(ierr);
485 
486     PetscFunctionReturn(0);
487   }
488   assert(pc->setupcalled == 0);
489 
490   /* GAMG requires input of fine-grid matrix. It determines nlevels. */
491   ierr = MatGetBlockSize( Pmat, &bs ); CHKERRQ(ierr);
492   ierr = MatGetSize( Pmat, &M, &N );CHKERRQ(ierr);
493   ierr = MatGetOwnershipRange( Pmat, &Istart, &Iend ); CHKERRQ(ierr);
494   nloc = (Iend-Istart)/bs; assert((Iend-Istart)%bs == 0);
495 
496   if( pc_gamg->data == 0 && nloc > 0 ) {
497     if(!pc_gamg->createdefaultdata){
498       SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_LIB,"'createdefaultdata' not set?!?! need to support NULL data!!!");
499     }
500     ierr = pc_gamg->createdefaultdata( pc ); CHKERRQ(ierr);
501   }
502 
503   /* Get A_i and R_i */
504   if (pc_gamg->verbose) {
505     if(pc_gamg->verbose==1) ierr =  MatGetInfo(Pmat,MAT_LOCAL,&info);
506     else ierr =  MatGetInfo(Pmat,MAT_GLOBAL_SUM,&info);
507     CHKERRQ(ierr);
508     nnz0 = info.nz_used;
509     nnztot = info.nz_used;
510     PetscPrintf(wcomm,"\t[%d]%s level %d N=%d, n data rows=%d, n data cols=%d, nnz/row (ave)=%d, np=%d\n",
511                 mype,__FUNCT__,0,N,pc_gamg->data_cell_rows,pc_gamg->data_cell_cols,
512                 (int)(nnz0/(PetscReal)N),npe);
513   }
514 
515   for ( level=0, Aarr[0] = Pmat, nactivepe = npe; /* hard wired stopping logic */
516         level < (pc_gamg->Nlevels-1) && (level==0 || M>pc_gamg->coarse_eq_limit); /* && (npe==1 || nactivepe>1); */
517         level++ ){
518     level1 = level + 1;
519 #if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES)
520     ierr = PetscLogStagePush(gamg_stages[level]); CHKERRQ( ierr );
521 #endif
522 #if defined PETSC_GAMG_USE_LOG
523     ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET1],0,0,0,0); CHKERRQ(ierr);
524 #endif
525     { /* construct prolongator */
526       Mat Gmat;
527       PetscCoarsenData *agg_lists;
528 
529       level_bs[level] = bs;
530       ierr = pc_gamg->graph( pc, Aarr[level], &Gmat ); CHKERRQ(ierr);
531       ierr = pc_gamg->coarsen( pc, &Gmat, &agg_lists ); CHKERRQ(ierr);
532       ierr = pc_gamg->prolongator( pc, Aarr[level], Gmat, agg_lists, &Parr[level1] );
533       CHKERRQ(ierr);
534 
535       if( Parr[level1] ){
536         /* get new block size of coarse matrices */
537         if( pc_gamg->col_bs_id != -1 ){
538           PetscBool flag;
539           ierr = PetscObjectComposedDataGetInt( (PetscObject)Parr[level1], pc_gamg->col_bs_id, bs, flag );
540           CHKERRQ( ierr ); assert(flag);
541         }
542       }
543 
544       if( pc_gamg->optprol && Parr[level1] ){
545         /* smooth */
546         ierr = pc_gamg->optprol( pc, Aarr[level], &Parr[level1] ); CHKERRQ(ierr);
547       }
548 
549       ierr = MatDestroy( &Gmat );      CHKERRQ(ierr);
550 
551       if ( pc_gamg->use_aggs_in_gasm ) {
552         ierr = PetscCDGetASMBlocks(agg_lists, level_bs[level], &nASMBlocksArr[level], &ASMLocalIDsArr[level]);  CHKERRQ(ierr);
553       }
554 
555       ierr = PetscCDGetRemovedIS( agg_lists, &removedEqs[level] );  CHKERRQ(ierr);
556 
557       ierr = PetscCDDestroy( agg_lists );  CHKERRQ(ierr);
558     }
559 #if defined PETSC_GAMG_USE_LOG
560     ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET1],0,0,0,0);CHKERRQ(ierr);
561 #endif
562     /* cache eigen estimate */
563     if( pc_gamg->emax_id != -1 ){
564       PetscBool flag;
565       ierr = PetscObjectComposedDataGetReal( (PetscObject)Aarr[level], pc_gamg->emax_id, emaxs[level], flag );
566       CHKERRQ( ierr );
567       if( !flag ) emaxs[level] = -1.;
568     }
569     else emaxs[level] = -1.;
570     if(level==0) Aarr[0] = Pmat; /* use Pmat for finest level setup */
571     if( !Parr[level1] ) {
572       if (pc_gamg->verbose) PetscPrintf(wcomm,"\t[%d]%s stop gridding, level %d\n",mype,__FUNCT__,level);
573       break;
574     }
575 #if defined PETSC_GAMG_USE_LOG
576     ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET2],0,0,0,0);CHKERRQ(ierr);
577 #endif
578     ierr = createLevel( pc, Aarr[level], bs,
579                         (PetscBool)(level==pc_gamg->Nlevels-2),
580                         &Parr[level1], &nactivepe, &Aarr[level1] );
581     CHKERRQ(ierr);
582 #if defined PETSC_GAMG_USE_LOG
583     ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET2],0,0,0,0);CHKERRQ(ierr);
584 #endif
585     ierr = MatGetSize( Aarr[level1], &M, &N );CHKERRQ(ierr);
586     if (pc_gamg->verbose){
587       PetscInt NN = M;
588       if(pc_gamg->verbose==1) {
589         ierr = MatGetInfo(Aarr[level1],MAT_LOCAL,&info);
590         ierr = MatGetLocalSize( Aarr[level1], &NN, &N );CHKERRQ(ierr);
591       }
592       else ierr = MatGetInfo(Aarr[level1],MAT_GLOBAL_SUM,&info);
593       CHKERRQ(ierr);
594       nnztot += info.nz_used;
595       PetscPrintf(wcomm,"\t\t[%d]%s %d) N=%d, n data cols=%d, nnz/row (ave)=%d, %d active pes\n",
596                   mype,__FUNCT__,(int)level1,N,pc_gamg->data_cell_cols,
597                   (int)(info.nz_used/(PetscReal)NN), nactivepe );
598       CHKERRQ(ierr);
599     }
600     /* stop if one node */
601     if( M/pc_gamg->data_cell_cols < 2 ) {
602       level++;
603       break;
604     }
605     /* Vec diag; PetscScalar *data_arr,v; PetscInt Istart,Iend,kk,nloceq,id; */
606     /* v = 1.e-10; /\* LU factor has hard wired numbers for small diags so this needs to match (yuk) *\/ */
607     /* ierr = MatGetOwnershipRange(Aarr[level1], &Istart, &Iend); CHKERRQ(ierr); */
608     /* nloceq = Iend-Istart; */
609     /* ierr = MatGetVecs( Aarr[level1], &diag, 0 );    CHKERRQ(ierr); */
610     /* ierr = MatGetDiagonal( Aarr[level1], diag );    CHKERRQ(ierr); */
611     /* ierr = VecGetArray( diag, &data_arr );   CHKERRQ(ierr); */
612     /* for(kk=0;kk<nloceq;kk++){ */
613     /*   if(data_arr[kk]==0.0) { */
614     /*     id = kk + Istart; */
615     /*     ierr = MatSetValues(Aarr[level1],1,&id,1,&id,&v,INSERT_VALUES); */
616     /*     CHKERRQ(ierr); */
617     /*     PetscPrintf(PETSC_COMM_SELF,"\t[%d]%s warning: added zero to diag (%d) on level %d \n",mype,__FUNCT__,id,level1); */
618     /*   } */
619     /* } */
620     /* ierr = VecRestoreArray( diag, &data_arr ); CHKERRQ(ierr); */
621     /* ierr = VecDestroy( &diag );                CHKERRQ(ierr); */
622     /* ierr = MatAssemblyBegin(Aarr[level1],MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); */
623     /* ierr = MatAssemblyEnd(Aarr[level1],MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); */
624 
625 #if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES)
626     ierr = PetscLogStagePop(); CHKERRQ( ierr );
627 #endif
628   } /* levels */
629 
630   if( pc_gamg->data ) {
631     ierr = PetscFree( pc_gamg->data ); CHKERRQ( ierr );
632     pc_gamg->data = 0;
633   }
634 
635   if (pc_gamg->verbose) PetscPrintf(wcomm,"\t[%d]%s %d levels, grid complexity = %g\n",0,__FUNCT__,level+1,nnztot/nnz0);
636   pc_gamg->Nlevels = level + 1;
637   fine_level = level;
638   ierr = PCMGSetLevels(pc,pc_gamg->Nlevels,PETSC_NULL);CHKERRQ(ierr);
639 
640   /* simple setup */
641   if( !PETSC_TRUE ){
642     PC_MG_Levels **mglevels = mg->levels;
643     for (lidx=0,level=pc_gamg->Nlevels-1;
644          lidx<fine_level;
645          lidx++, level--){
646       ierr = PCMGSetInterpolation( pc, lidx+1, Parr[level] );CHKERRQ(ierr);
647       ierr = KSPSetOperators( mglevels[lidx]->smoothd, Aarr[level], Aarr[level], SAME_NONZERO_PATTERN );CHKERRQ(ierr);
648       ierr = MatDestroy( &Parr[level] );  CHKERRQ(ierr);
649       ierr = MatDestroy( &Aarr[level] );  CHKERRQ(ierr);
650     }
651     ierr = KSPSetOperators( mglevels[fine_level]->smoothd, Aarr[0], Aarr[0], SAME_NONZERO_PATTERN );   CHKERRQ(ierr);
652 
653     ierr = PCSetUp_MG( pc );  CHKERRQ( ierr );
654   }
655   else if( pc_gamg->Nlevels > 1 ) { /* don't setup MG if one level */
656     /* set default smoothers & set operators */
657     for ( lidx = 1, level = pc_gamg->Nlevels-2;
658           lidx <= fine_level;
659           lidx++, level--) {
660       KSP smoother;
661       PC subpc;
662       /* set defaults */
663       ierr = PCMGGetSmoother( pc, lidx, &smoother ); CHKERRQ(ierr);
664       ierr = KSPSetType( smoother, KSPCHEBYCHEV );CHKERRQ(ierr);
665       ierr = KSPGetPC( smoother, &subpc ); CHKERRQ(ierr);
666 
667       /* override defaults and command line args (!) */
668       if ( pc_gamg->use_aggs_in_gasm ) {
669         PetscInt sz;
670         IS *is;
671         if( PETSC_FALSE ){
672           PetscInt ii;
673           PetscPrintf(PETSC_COMM_SELF,"\t[%d]%s Nblocks=%d\n",mype,__FUNCT__,nASMBlocksArr[level]);
674           for(ii=0;ii<nASMBlocksArr[level];ii++){
675             ISView(ASMLocalIDsArr[level][ii],PETSC_VIEWER_STDOUT_SELF);
676           }
677         }
678         sz = nASMBlocksArr[level];
679         is = ASMLocalIDsArr[level];
680         ierr = PCSetType( subpc, PCGASM ); CHKERRQ(ierr);
681         if(sz==0){
682           IS is;
683           PetscInt my0,kk;
684           ierr = MatGetOwnershipRange( Aarr[level], &my0, &kk ); CHKERRQ(ierr);
685           ierr = ISCreateGeneral(PETSC_COMM_SELF, 1, &my0, PETSC_COPY_VALUES, &is ); CHKERRQ(ierr);
686           ierr = PCGASMSetLocalSubdomains( subpc, 1, &is, PETSC_NULL ); CHKERRQ(ierr);
687           ierr = ISDestroy( &is ); CHKERRQ(ierr);
688         }
689         else {
690           PetscInt kk;
691           ierr = PCGASMSetLocalSubdomains( subpc, sz, is, PETSC_NULL ); CHKERRQ(ierr);
692           for(kk=0;kk<sz;kk++){
693             ierr = ISDestroy( &is[kk] ); CHKERRQ(ierr);
694           }
695           ierr = PetscFree( is ); CHKERRQ(ierr);
696         }
697         ierr = PCGASMSetOverlap( subpc, 0 ); CHKERRQ(ierr);
698 
699         ASMLocalIDsArr[level] = PETSC_NULL;
700         nASMBlocksArr[level] = 0;
701         ierr = PCGASMSetType( subpc, PC_GASM_BASIC ); CHKERRQ(ierr);
702       }
703       else {
704         ierr = PCSetType( subpc, PCJACOBI ); CHKERRQ(ierr);
705       }
706       ierr = KSPSetNormType( smoother, KSP_NORM_NONE ); CHKERRQ(ierr);
707       /* set ops */
708       ierr = KSPSetOperators( smoother, Aarr[level], Aarr[level], SAME_NONZERO_PATTERN );   CHKERRQ(ierr);
709       ierr = PCMGSetInterpolation( pc, lidx, Parr[level+1] );CHKERRQ(ierr);
710     }
711     {
712       /* coarse grid */
713       KSP smoother,*k2; PC subpc,pc2; PetscInt ii,first;
714       Mat Lmat = Aarr[(level=pc_gamg->Nlevels-1)]; lidx = 0;
715       ierr = PCMGGetSmoother( pc, lidx, &smoother ); CHKERRQ(ierr);
716       ierr = KSPSetOperators( smoother, Lmat, Lmat, SAME_NONZERO_PATTERN ); CHKERRQ(ierr);
717       ierr = KSPSetNormType( smoother, KSP_NORM_NONE ); CHKERRQ(ierr);
718       ierr = KSPGetPC( smoother, &subpc ); CHKERRQ(ierr);
719       ierr = PCSetType( subpc, PCBJACOBI ); CHKERRQ(ierr);
720       ierr = PCSetUp( subpc ); CHKERRQ(ierr);
721       ierr = PCBJacobiGetSubKSP(subpc,&ii,&first,&k2);CHKERRQ(ierr);      assert(ii==1);
722       ierr = KSPGetPC(k2[0],&pc2);CHKERRQ(ierr);
723       ierr = PCSetType( pc2, PCLU ); CHKERRQ(ierr);
724     }
725 
726     /* should be called in PCSetFromOptions_GAMG(), but cannot be called prior to PCMGSetLevels() */
727     ierr = PetscObjectOptionsBegin( (PetscObject)pc );CHKERRQ(ierr);
728     ierr = PCSetFromOptions_MG( pc ); CHKERRQ(ierr);
729     ierr = PetscOptionsEnd();  CHKERRQ(ierr);
730     if (mg->galerkin != 2) SETERRQ(wcomm,PETSC_ERR_USER,"GAMG does Galerkin manually so the -pc_mg_galerkin option must not be used.");
731 
732     /* create cheby smoothers */
733     for ( lidx = 1, level = pc_gamg->Nlevels-2;
734           lidx <= fine_level;
735           lidx++, level--) {
736       KSP smoother;
737       PetscBool flag;
738       PC subpc;
739 
740       ierr = PCMGGetSmoother( pc, lidx, &smoother ); CHKERRQ(ierr);
741 
742       /* do my own cheby */
743       ierr = PetscTypeCompare( (PetscObject)smoother, KSPCHEBYCHEV, &flag ); CHKERRQ(ierr);
744       if( flag ) {
745         PetscReal emax, emin;
746         ierr = KSPGetPC( smoother, &subpc ); CHKERRQ(ierr);
747         ierr = PetscTypeCompare( (PetscObject)subpc, PCJACOBI, &flag ); CHKERRQ(ierr);
748         if( flag && emaxs[level] > 0.0 ) emax=emaxs[level]; /* eigen estimate only for diagonal PC */
749         else{ /* eigen estimate 'emax' */
750           KSP eksp; Mat Lmat = Aarr[level];
751           Vec bb, xx;
752 
753           ierr = MatGetVecs( Lmat, &bb, 0 );         CHKERRQ(ierr);
754           ierr = MatGetVecs( Lmat, &xx, 0 );         CHKERRQ(ierr);
755           {
756             PetscRandom    rctx;
757             ierr = PetscRandomCreate(wcomm,&rctx);CHKERRQ(ierr);
758             ierr = PetscRandomSetFromOptions(rctx);CHKERRQ(ierr);
759             ierr = VecSetRandom(bb,rctx);CHKERRQ(ierr);
760             ierr = PetscRandomDestroy( &rctx ); CHKERRQ(ierr);
761           }
762           if( removedEqs[level] ) {
763             PetscScalar *zeros;
764             PetscInt ii,jj, *idx_bs, sz, bs=level_bs[level];
765             const PetscInt *idx;
766             ierr = ISGetLocalSize( removedEqs[level], &sz ); CHKERRQ(ierr);
767             ierr = PetscMalloc( bs*sz*sizeof(PetscScalar), &zeros ); CHKERRQ(ierr);
768             for(ii=0;ii<bs*sz;ii++) zeros[ii] = 0.;
769             ierr = PetscMalloc( bs*sz*sizeof(PetscInt), &idx_bs ); CHKERRQ(ierr);
770             ierr = ISGetIndices( removedEqs[level], &idx); CHKERRQ(ierr);
771             for(ii=0;ii<sz;ii++) {
772               for(jj=0;jj<bs;jj++) {
773                 idx_bs[ii*bs + jj] = bs*idx[ii] + jj;
774               }
775             }
776             ierr = ISRestoreIndices( removedEqs[level], &idx ); CHKERRQ(ierr);
777             if( sz > 0 ) {
778               ierr = VecSetValues( bb, sz, idx_bs, zeros, INSERT_VALUES );  CHKERRQ(ierr);
779             }
780             ierr = PetscFree( idx_bs );  CHKERRQ(ierr);
781             ierr = PetscFree( zeros );  CHKERRQ(ierr);
782             ierr = VecAssemblyBegin(bb); CHKERRQ(ierr);
783             ierr = VecAssemblyEnd(bb); CHKERRQ(ierr);
784             ierr = ISDestroy( &removedEqs[level] );                    CHKERRQ(ierr);
785           }
786           ierr = KSPCreate( wcomm, &eksp );CHKERRQ(ierr);
787           ierr = KSPSetTolerances( eksp, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT, 10 );
788           CHKERRQ(ierr);
789           ierr = KSPSetNormType( eksp, KSP_NORM_NONE );                 CHKERRQ(ierr);
790 
791           ierr = KSPAppendOptionsPrefix( eksp, "est_");         CHKERRQ(ierr);
792           ierr = KSPSetFromOptions( eksp );    CHKERRQ(ierr);
793 
794           ierr = KSPSetInitialGuessNonzero( eksp, PETSC_FALSE ); CHKERRQ(ierr);
795           ierr = KSPSetOperators( eksp, Lmat, Lmat, SAME_NONZERO_PATTERN ); CHKERRQ( ierr );
796           ierr = KSPSetComputeSingularValues( eksp,PETSC_TRUE ); CHKERRQ(ierr);
797 
798           /* set PC type to be same as smoother */
799           ierr = KSPSetPC( eksp, subpc ); CHKERRQ( ierr );
800 
801           /* solve - keep stuff out of logging */
802           ierr = PetscLogEventDeactivate(KSP_Solve);CHKERRQ(ierr);
803           ierr = PetscLogEventDeactivate(PC_Apply);CHKERRQ(ierr);
804           ierr = KSPSolve( eksp, bb, xx ); CHKERRQ(ierr);
805           ierr = PetscLogEventActivate(KSP_Solve);CHKERRQ(ierr);
806           ierr = PetscLogEventActivate(PC_Apply);CHKERRQ(ierr);
807 
808           ierr = KSPComputeExtremeSingularValues( eksp, &emax, &emin ); CHKERRQ(ierr);
809 
810           ierr = VecDestroy( &xx );       CHKERRQ(ierr);
811           ierr = VecDestroy( &bb );       CHKERRQ(ierr);
812           ierr = KSPDestroy( &eksp );       CHKERRQ(ierr);
813 
814           if( pc_gamg->verbose > 0 ) {
815             PetscInt N1, tt;
816             ierr = MatGetSize( Aarr[level], &N1, &tt );         CHKERRQ(ierr);
817             PetscPrintf(wcomm,"\t\t\t%s PC setup max eigen=%e min=%e on level %d (N=%d)\n",__FUNCT__,emax,emin,lidx,N1);
818           }
819         }
820         {
821           PetscInt N1, N0, tt;
822           ierr = MatGetSize( Aarr[level], &N1, &tt );         CHKERRQ(ierr);
823           ierr = MatGetSize( Aarr[level+1], &N0, &tt );       CHKERRQ(ierr);
824           /* heuristic - is this crap? */
825           emin = 1.*emax/((PetscReal)N1/(PetscReal)N0);
826           emax *= 1.05;
827         }
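          /* e.g. a coarsening ratio N1/N0 of 8 gives a Chebyshev target interval of roughly
             [emax/8, 1.05*emax] on this level (illustrative numbers only) */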
828         ierr = KSPChebychevSetEigenvalues( smoother, emax, emin );CHKERRQ(ierr);
829       } /* setup checby flag */
830     } /* non-coarse levels */
831 
832     /* clean up */
833     for(level=1;level<pc_gamg->Nlevels;level++){
834       ierr = MatDestroy( &Parr[level] );  CHKERRQ(ierr);
835       ierr = MatDestroy( &Aarr[level] );  CHKERRQ(ierr);
836     }
837 
838     ierr = PCSetUp_MG( pc );CHKERRQ( ierr );
839 
840     if( PETSC_FALSE ){
841       KSP smoother;  /* PCSetUp_MG seems to insist on setting this to GMRES on coarse grid */
842       ierr = PCMGGetSmoother( pc, 0, &smoother ); CHKERRQ(ierr);
843       ierr = KSPSetType( smoother, KSPPREONLY ); CHKERRQ(ierr);
844     }
845   }
846   else {
847     KSP smoother;
848     if (pc_gamg->verbose) PetscPrintf(wcomm,"\t[%d]%s one level solver used (system is seen as DD). Using default solver.\n",mype,__FUNCT__);
849     ierr = PCMGGetSmoother( pc, 0, &smoother ); CHKERRQ(ierr);
850     ierr = KSPSetOperators( smoother, Aarr[0], Aarr[0], SAME_NONZERO_PATTERN );   CHKERRQ(ierr);
851     ierr = KSPSetType( smoother, KSPPREONLY ); CHKERRQ(ierr);
852     ierr = PCSetUp_MG( pc );CHKERRQ( ierr );
853   }
854 
855   PetscFunctionReturn(0);
856 }
857 
858 /* ------------------------------------------------------------------------- */
859 /*
860  PCDestroy_GAMG - Destroys the private context for the GAMG preconditioner
861    that was created with PCCreate_GAMG().
862 
863    Input Parameter:
864 .  pc - the preconditioner context
865 
866    Application Interface Routine: PCDestroy()
867 */
868 #undef __FUNCT__
869 #define __FUNCT__ "PCDestroy_GAMG"
870 PetscErrorCode PCDestroy_GAMG( PC pc )
871 {
872   PetscErrorCode  ierr;
873   PC_MG           *mg = (PC_MG*)pc->data;
874   PC_GAMG         *pc_gamg= (PC_GAMG*)mg->innerctx;
875 
876   PetscFunctionBegin;
877   ierr = PCReset_GAMG( pc );CHKERRQ(ierr);
878   ierr = PetscFree( pc_gamg );CHKERRQ(ierr);
879   ierr = PCDestroy_MG( pc );CHKERRQ(ierr);
880   PetscFunctionReturn(0);
881 }
882 
883 
884 #undef __FUNCT__
885 #define __FUNCT__ "PCGAMGSetProcEqLim"
886 /*@
887    PCGAMGSetProcEqLim - Set the number of equations to aim for per process on coarse
888    grids via processor reduction.
889 
890    Not Collective on PC
891 
892    Input Parameters:
893 +  pc - the preconditioner context
894 -  n - the number of equations to aim for per process on the coarse grids
895 
896    Options Database Key:
897 .  -pc_gamg_process_eq_limit
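
   Example (illustrative), aiming for roughly 1000 equations per process on coarse grids:
.vb
   ierr = PCGAMGSetProcEqLim(pc,1000);CHKERRQ(ierr);
.ve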
898 
899    Level: intermediate
900 
901    Concepts: Unstructured multigrid preconditioner
902 
903 .seealso: ()
904 @*/
905 PetscErrorCode  PCGAMGSetProcEqLim(PC pc, PetscInt n)
906 {
907   PetscErrorCode ierr;
908 
909   PetscFunctionBegin;
910   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
911   ierr = PetscTryMethod(pc,"PCGAMGSetProcEqLim_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr);
912   PetscFunctionReturn(0);
913 }
914 
915 EXTERN_C_BEGIN
916 #undef __FUNCT__
917 #define __FUNCT__ "PCGAMGSetProcEqLim_GAMG"
918 PetscErrorCode PCGAMGSetProcEqLim_GAMG(PC pc, PetscInt n)
919 {
920   PC_MG           *mg = (PC_MG*)pc->data;
921   PC_GAMG         *pc_gamg = (PC_GAMG*)mg->innerctx;
922 
923   PetscFunctionBegin;
924   if(n>0) pc_gamg->min_eq_proc = n;
925   PetscFunctionReturn(0);
926 }
927 EXTERN_C_END
928 
929 #undef __FUNCT__
930 #define __FUNCT__ "PCGAMGSetCoarseEqLim"
931 /*@
932    PCGAMGSetCoarseEqLim - Set max number of equations on coarse grids.
933 
934    Collective on PC
935 
936    Input Parameters:
937 +  pc - the preconditioner context
938 -  n - the maximum number of equations on the coarsest grid
939 
940    Options Database Key:
941 .  -pc_gamg_coarse_eq_limit
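
   Example (illustrative), limiting the coarsest grid to roughly 500 equations:
.vb
   ierr = PCGAMGSetCoarseEqLim(pc,500);CHKERRQ(ierr);
.ve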
942 
943    Level: intermediate
944 
945    Concepts: Unstructured multigrid preconditioner
946 
947 .seealso: ()
948  @*/
949 PetscErrorCode PCGAMGSetCoarseEqLim(PC pc, PetscInt n)
950 {
951   PetscErrorCode ierr;
952 
953   PetscFunctionBegin;
954   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
955   ierr = PetscTryMethod(pc,"PCGAMGSetCoarseEqLim_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr);
956   PetscFunctionReturn(0);
957 }
958 
959 EXTERN_C_BEGIN
960 #undef __FUNCT__
961 #define __FUNCT__ "PCGAMGSetCoarseEqLim_GAMG"
962 PetscErrorCode PCGAMGSetCoarseEqLim_GAMG(PC pc, PetscInt n)
963 {
964   PC_MG           *mg = (PC_MG*)pc->data;
965   PC_GAMG         *pc_gamg = (PC_GAMG*)mg->innerctx;
966 
967   PetscFunctionBegin;
968   if(n>0) pc_gamg->coarse_eq_limit = n;
969   PetscFunctionReturn(0);
970 }
971 EXTERN_C_END
972 
973 #undef __FUNCT__
974 #define __FUNCT__ "PCGAMGSetRepartitioning"
975 /*@
976    PCGAMGSetRepartitioning - Repartition the coarse grids
977 
978    Collective on PC
979 
980    Input Parameters:
981 +  pc - the preconditioner context
982 -  n - PETSC_TRUE to repartition the coarse grids
983 
984    Options Database Key:
985 .  -pc_gamg_repartition
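
   Example (illustrative):
.vb
   ierr = PCGAMGSetRepartitioning(pc,PETSC_TRUE);CHKERRQ(ierr);
.ve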
986 
987    Level: intermediate
988 
989    Concepts: Unstructured multigrid preconditioner
990 
991 .seealso: ()
992 @*/
993 PetscErrorCode PCGAMGSetRepartitioning(PC pc, PetscBool n)
994 {
995   PetscErrorCode ierr;
996 
997   PetscFunctionBegin;
998   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
999   ierr = PetscTryMethod(pc,"PCGAMGSetRepartitioning_C",(PC,PetscBool),(pc,n));CHKERRQ(ierr);
1000   PetscFunctionReturn(0);
1001 }
1002 
1003 EXTERN_C_BEGIN
1004 #undef __FUNCT__
1005 #define __FUNCT__ "PCGAMGSetRepartitioning_GAMG"
1006 PetscErrorCode PCGAMGSetRepartitioning_GAMG(PC pc, PetscBool n)
1007 {
1008   PC_MG           *mg = (PC_MG*)pc->data;
1009   PC_GAMG         *pc_gamg = (PC_GAMG*)mg->innerctx;
1010 
1011   PetscFunctionBegin;
1012   pc_gamg->repart = n;
1013   PetscFunctionReturn(0);
1014 }
1015 EXTERN_C_END
1016 
1017 #undef __FUNCT__
1018 #define __FUNCT__ "PCGAMGSetUseASMAggs"
1019 /*@
1020    PCGAMGSetUseASMAggs - Use the aggregates from the coarsening as the subdomains for the GASM smoother
1021 
1022    Collective on PC
1023 
1024    Input Parameters:
1025 +  pc - the preconditioner context
1026 -  n - PETSC_TRUE to use the aggregates as the GASM subdomains
1027 
1028    Options Database Key:
1029 .  -pc_gamg_use_agg_gasm
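
   Example (illustrative):
.vb
   ierr = PCGAMGSetUseASMAggs(pc,PETSC_TRUE);CHKERRQ(ierr);
.ve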
1030 
1031    Level: intermediate
1032 
1033    Concepts: Unstructured multigrid preconditioner
1034 
1035 .seealso: ()
1036 @*/
1037 PetscErrorCode PCGAMGSetUseASMAggs(PC pc, PetscBool n)
1038 {
1039   PetscErrorCode ierr;
1040 
1041   PetscFunctionBegin;
1042   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1043   ierr = PetscTryMethod(pc,"PCGAMGSetUseASMAggs_C",(PC,PetscBool),(pc,n));CHKERRQ(ierr);
1044   PetscFunctionReturn(0);
1045 }
1046 
1047 EXTERN_C_BEGIN
1048 #undef __FUNCT__
1049 #define __FUNCT__ "PCGAMGSetUseASMAggs_GAMG"
1050 PetscErrorCode PCGAMGSetUseASMAggs_GAMG(PC pc, PetscBool n)
1051 {
1052   PC_MG           *mg = (PC_MG*)pc->data;
1053   PC_GAMG         *pc_gamg = (PC_GAMG*)mg->innerctx;
1054 
1055   PetscFunctionBegin;
1056   pc_gamg->use_aggs_in_gasm = n;
1057   PetscFunctionReturn(0);
1058 }
1059 EXTERN_C_END
1060 
1061 #undef __FUNCT__
1062 #define __FUNCT__ "PCGAMGSetNlevels"
1063 /*@
1064    PCGAMGSetNlevels - Set the maximum number of multigrid levels
1065 
1066    Not collective on PC
1067 
1068    Input Parameters:
1069 +  pc - the preconditioner context
1070 -  n - the maximum number of levels to use
1071 
1072    Options Database Key:
1073 .  -pc_mg_levels
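
   Example (illustrative), limiting the hierarchy to at most 4 levels:
.vb
   ierr = PCGAMGSetNlevels(pc,4);CHKERRQ(ierr);
.ve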
1074 
1075    Level: intermediate
1076 
1077    Concepts: Unstructured multigrid preconditioner
1078 
1079 .seealso: ()
1080 @*/
1081 PetscErrorCode PCGAMGSetNlevels(PC pc, PetscInt n)
1082 {
1083   PetscErrorCode ierr;
1084 
1085   PetscFunctionBegin;
1086   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1087   ierr = PetscTryMethod(pc,"PCGAMGSetNlevels_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr);
1088   PetscFunctionReturn(0);
1089 }
1090 
1091 EXTERN_C_BEGIN
1092 #undef __FUNCT__
1093 #define __FUNCT__ "PCGAMGSetNlevels_GAMG"
1094 PetscErrorCode PCGAMGSetNlevels_GAMG(PC pc, PetscInt n)
1095 {
1096   PC_MG           *mg = (PC_MG*)pc->data;
1097   PC_GAMG         *pc_gamg = (PC_GAMG*)mg->innerctx;
1098 
1099   PetscFunctionBegin;
1100   pc_gamg->Nlevels = n;
1101   PetscFunctionReturn(0);
1102 }
1103 EXTERN_C_END
1104 
1105 #undef __FUNCT__
1106 #define __FUNCT__ "PCGAMGSetThreshold"
1107 /*@
1108    PCGAMGSetThreshold - Relative threshold to use for dropping edges in aggregation graph
1109 
1110    Not collective on PC
1111 
1112    Input Parameters:
1113 +  pc - the preconditioner context
1114 -  n - the relative threshold used to drop edges in the aggregation graph
1115 
1116    Options Database Key:
1117 .  -pc_gamg_threshold
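
   Example (illustrative), using the default relative threshold set in PCCreate_GAMG():
.vb
   ierr = PCGAMGSetThreshold(pc,0.05);CHKERRQ(ierr);
.ve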
1118 
1119    Level: intermediate
1120 
1121    Concepts: Unstructured multigrid preconditioner
1122 
1123 .seealso: ()
1124 @*/
1125 PetscErrorCode PCGAMGSetThreshold(PC pc, PetscReal n)
1126 {
1127   PetscErrorCode ierr;
1128 
1129   PetscFunctionBegin;
1130   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1131   ierr = PetscTryMethod(pc,"PCGAMGSetThreshold_C",(PC,PetscReal),(pc,n));CHKERRQ(ierr);
1132   PetscFunctionReturn(0);
1133 }
1134 
1135 EXTERN_C_BEGIN
1136 #undef __FUNCT__
1137 #define __FUNCT__ "PCGAMGSetThreshold_GAMG"
1138 PetscErrorCode PCGAMGSetThreshold_GAMG(PC pc, PetscReal n)
1139 {
1140   PC_MG           *mg = (PC_MG*)pc->data;
1141   PC_GAMG         *pc_gamg = (PC_GAMG*)mg->innerctx;
1142 
1143   PetscFunctionBegin;
1144   pc_gamg->threshold = n;
1145   PetscFunctionReturn(0);
1146 }
1147 EXTERN_C_END
1148 
1149 #undef __FUNCT__
1150 #define __FUNCT__ "PCGAMGSetType"
1151 /*@
1152    PCGAMGSetType - Set the coarsening method (geometric or aggregation); calls the type's create method
1153 
1154    Collective on PC
1155 
1156    Input Parameters:
1157 +  pc - the preconditioner context
1158 -  type - the GAMG method (GAMGGEO or GAMGAGG)
1159 
1160    Options Database Key:
1161 .  -pc_gamg_type
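
   Example (illustrative), selecting the aggregation ("agg") method:
.vb
   ierr = PCGAMGSetType(pc,GAMGAGG);CHKERRQ(ierr);
.ve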
1162 
1163    Level: intermediate
1164 
1165    Concepts: Unstructured multigrid preconditioner
1166 
1167 .seealso: ()
1168 @*/
1169 PetscErrorCode PCGAMGSetType( PC pc, const PCGAMGType type )
1170 {
1171   PetscErrorCode ierr;
1172 
1173   PetscFunctionBegin;
1174   PetscValidHeaderSpecific(pc,PC_CLASSID,1);
1175   ierr = PetscTryMethod(pc,"PCGAMGSetType_C",(PC,const PCGAMGType),(pc,type));
1176   CHKERRQ(ierr);
1177   PetscFunctionReturn(0);
1178 }
1179 
1180 EXTERN_C_BEGIN
1181 #undef __FUNCT__
1182 #define __FUNCT__ "PCGAMGSetType_GAMG"
1183 PetscErrorCode PCGAMGSetType_GAMG( PC pc, const PCGAMGType type )
1184 {
1185   PetscErrorCode ierr,(*r)(PC);
1186 
1187   PetscFunctionBegin;
1188   ierr = PetscFListFind(GAMGList,((PetscObject)pc)->comm,type,PETSC_FALSE,(PetscVoidStarFunction)&r);
1189   CHKERRQ(ierr);
1190 
1191   if (!r) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_UNKNOWN_TYPE,"Unknown GAMG type %s given",type);
1192 
1193   /* call sub create method */
1194   ierr = (*r)(pc); CHKERRQ(ierr);
1195 
1196   PetscFunctionReturn(0);
1197 }
1198 EXTERN_C_END
1199 
1200 #undef __FUNCT__
1201 #define __FUNCT__ "PCSetFromOptions_GAMG"
1202 PetscErrorCode PCSetFromOptions_GAMG( PC pc )
1203 {
1204   PetscErrorCode  ierr;
1205   PC_MG           *mg = (PC_MG*)pc->data;
1206   PC_GAMG         *pc_gamg = (PC_GAMG*)mg->innerctx;
1207   PetscBool        flag;
1208   MPI_Comm         wcomm = ((PetscObject)pc)->comm;
1209 
1210   PetscFunctionBegin;
1211   ierr = PetscOptionsHead("GAMG options"); CHKERRQ(ierr);
1212   {
1213     /* -pc_gamg_verbose */
1214     ierr = PetscOptionsInt("-pc_gamg_verbose","Verbose (debugging) output for PCGAMG",
1215                            "none", pc_gamg->verbose,
1216                            &pc_gamg->verbose, PETSC_NULL );
1217     CHKERRQ(ierr);
1218 
1219     /* -pc_gamg_repartition */
1220     ierr = PetscOptionsBool("-pc_gamg_repartition",
1221                             "Repartition coarse grids (false)",
1222                             "PCGAMGRepartitioning",
1223                             pc_gamg->repart,
1224                             &pc_gamg->repart,
1225                             &flag);
1226     CHKERRQ(ierr);
1227 
1228     /* -pc_gamg_use_agg_gasm */
1229     ierr = PetscOptionsBool("-pc_gamg_use_agg_gasm",
1230                             "Use aggregates from the coarsening for the GASM smoother (false)",
1231                             "PCGAMGUseASMAggs",
1232                             pc_gamg->use_aggs_in_gasm,
1233                             &pc_gamg->use_aggs_in_gasm,
1234                             &flag);
1235     CHKERRQ(ierr);
1236 
1237     /* -pc_gamg_process_eq_limit */
1238     ierr = PetscOptionsInt("-pc_gamg_process_eq_limit",
1239                            "Limit (goal) on number of equations per process on coarse grids",
1240                            "PCGAMGSetProcEqLim",
1241                            pc_gamg->min_eq_proc,
1242                            &pc_gamg->min_eq_proc,
1243                            &flag );
1244     CHKERRQ(ierr);
1245 
1246     /* -pc_gamg_coarse_eq_limit */
1247     ierr = PetscOptionsInt("-pc_gamg_coarse_eq_limit",
1248                            "Limit on number of equations for the coarse grid",
1249                            "PCGAMGSetCoarseEqLim",
1250                            pc_gamg->coarse_eq_limit,
1251                            &pc_gamg->coarse_eq_limit,
1252                            &flag );
1253     CHKERRQ(ierr);
1254 
1255     /* -pc_gamg_threshold */
1256     ierr = PetscOptionsReal("-pc_gamg_threshold",
1257                             "Relative threshold to use for dropping edges in aggregation graph",
1258                             "PCGAMGSetThreshold",
1259                             pc_gamg->threshold,
1260                             &pc_gamg->threshold,
1261                             &flag );
1262     CHKERRQ(ierr);
1263     if(flag && pc_gamg->verbose) PetscPrintf(wcomm,"\t[%d]%s threshold set %e\n",0,__FUNCT__,pc_gamg->threshold);
1264 
1265     ierr = PetscOptionsInt("-pc_mg_levels",
1266                            "Set number of MG levels",
1267                            "PCGAMGSetNlevels",
1268                            pc_gamg->Nlevels,
1269                            &pc_gamg->Nlevels,
1270                            &flag );
1271   }
1272   ierr = PetscOptionsTail();CHKERRQ(ierr);
1273 
1274   PetscFunctionReturn(0);
1275 }
1276 
1277 /* -------------------------------------------------------------------------- */
1278 /*MC
1279      PCCreate_GAMG - Geometric algebraic multigrid (AMG) preconditioning framework.
1280        - This is the entry point to GAMG, registered in pcregis.c
1281 
1282    Options Database Keys:
1283    Multigrid options(inherited)
1284 +  -pc_mg_cycles <1>: 1 for V cycle, 2 for W-cycle (PCMGSetCycleType)
1285 .  -pc_mg_smoothup <1>: Number of post-smoothing steps (PCMGSetNumberSmoothUp)
1286 .  -pc_mg_smoothdown <1>: Number of pre-smoothing steps (PCMGSetNumberSmoothDown)
1287 -  -pc_mg_type <multiplicative>: (one of) additive multiplicative full cascade kascade
1288 
1289   Level: intermediate
1290 
1291   Concepts: multigrid
1292 
1293 .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC, PCMGType,
1294            PCMGSetLevels(), PCMGGetLevels(), PCMGSetType(), PCMGSetCycleType(), PCMGSetNumberSmoothDown(),
1295            PCMGSetNumberSmoothUp(), PCMGGetCoarseSolve(), PCMGSetResidual(), PCMGSetInterpolation(),
1296            PCMGSetRestriction(), PCMGGetSmoother(), PCMGGetSmootherUp(), PCMGGetSmootherDown(),
1297            PCMGSetCyclesOnLevel(), PCMGSetRhs(), PCMGSetX(), PCMGSetR()
1298 M*/
1299 EXTERN_C_BEGIN
1300 #undef __FUNCT__
1301 #define __FUNCT__ "PCCreate_GAMG"
1302 PetscErrorCode  PCCreate_GAMG( PC pc )
1303 {
1304   PetscErrorCode  ierr;
1305   PC_GAMG         *pc_gamg;
1306   PC_MG           *mg;
1307 
1308 #if defined PETSC_GAMG_USE_LOG
1309   static long count = 0;
1310 #endif
1311 
1312   PetscFunctionBegin;
1313   /* PCGAMG is an inherited class of PCMG. Initialize pc as PCMG */
1314   ierr = PCSetType( pc, PCMG );  CHKERRQ(ierr); /* calls PCCreate_MG() and MGCreate_Private() */
1315   ierr = PetscObjectChangeTypeName( (PetscObject)pc, PCGAMG ); CHKERRQ(ierr);
1316 
1317   /* create a supporting struct and attach it to pc */
1318   ierr = PetscNewLog( pc, PC_GAMG, &pc_gamg ); CHKERRQ(ierr);
1319   mg = (PC_MG*)pc->data;
1320   mg->galerkin = 2;             /* Use Galerkin, but it is computed externally */
1321   mg->innerctx = pc_gamg;
1322 
1323   pc_gamg->setup_count = 0;
1324   /* these should be in subctx but repartitioning needs simple arrays */
1325   pc_gamg->data_sz = 0;
1326   pc_gamg->data = 0;
1327 
1328   /* register AMG type */
1329   if( !GAMGList ){
1330     ierr = PetscFListAdd(&GAMGList,GAMGGEO,"PCCreateGAMG_GEO",(void(*)(void))PCCreateGAMG_GEO);CHKERRQ(ierr);
1331     ierr = PetscFListAdd(&GAMGList,GAMGAGG,"PCCreateGAMG_AGG",(void(*)(void))PCCreateGAMG_AGG);CHKERRQ(ierr);
1332   }
1333 
1334   /* overwrite the pointers of PCMG by the functions of base class PCGAMG */
1335   pc->ops->setfromoptions = PCSetFromOptions_GAMG;
1336   pc->ops->setup          = PCSetUp_GAMG;
1337   pc->ops->reset          = PCReset_GAMG;
1338   pc->ops->destroy        = PCDestroy_GAMG;
1339 
1340   ierr = PetscObjectComposeFunctionDynamic( (PetscObject)pc,
1341 					    "PCGAMGSetProcEqLim_C",
1342 					    "PCGAMGSetProcEqLim_GAMG",
1343 					    PCGAMGSetProcEqLim_GAMG);
1344   CHKERRQ(ierr);
1345 
1346   ierr = PetscObjectComposeFunctionDynamic( (PetscObject)pc,
1347 					    "PCGAMGSetCoarseEqLim_C",
1348 					    "PCGAMGSetCoarseEqLim_GAMG",
1349 					    PCGAMGSetCoarseEqLim_GAMG);
1350   CHKERRQ(ierr);
1351 
1352   ierr = PetscObjectComposeFunctionDynamic( (PetscObject)pc,
1353 					    "PCGAMGSetRepartitioning_C",
1354 					    "PCGAMGSetRepartitioning_GAMG",
1355 					    PCGAMGSetRepartitioning_GAMG);
1356   CHKERRQ(ierr);
1357 
1358   ierr = PetscObjectComposeFunctionDynamic( (PetscObject)pc,
1359 					    "PCGAMGSetUseASMAggs_C",
1360 					    "PCGAMGSetUseASMAggs_GAMG",
1361 					    PCGAMGSetUseASMAggs_GAMG);
1362   CHKERRQ(ierr);
1363 
1364   ierr = PetscObjectComposeFunctionDynamic( (PetscObject)pc,
1365 					    "PCGAMGSetThreshold_C",
1366 					    "PCGAMGSetThreshold_GAMG",
1367 					    PCGAMGSetThreshold_GAMG);
1368   CHKERRQ(ierr);
1369 
1370   ierr = PetscObjectComposeFunctionDynamic( (PetscObject)pc,
1371 					    "PCGAMGSetType_C",
1372 					    "PCGAMGSetType_GAMG",
1373 					    PCGAMGSetType_GAMG);
1374   CHKERRQ(ierr);
1375 
1376   pc_gamg->repart = PETSC_FALSE;
1377   pc_gamg->use_aggs_in_gasm = PETSC_FALSE;
1378   pc_gamg->min_eq_proc = 100;
1379   pc_gamg->coarse_eq_limit = 800;
1380   pc_gamg->threshold = 0.05;
1381   pc_gamg->Nlevels = GAMG_MAXLEVELS;
1382   pc_gamg->verbose = 0;
1383   pc_gamg->emax_id = -1;
1384   pc_gamg->col_bs_id = -1;
1385 
1386   /* instantiate derived type */
1387   ierr = PetscOptionsHead("GAMG options"); CHKERRQ(ierr);
1388   {
1389     char tname[256] = GAMGAGG;
1390     ierr = PetscOptionsList("-pc_gamg_type","Type of GAMG method","PCGAMGSetType",
1391                             GAMGList, tname, tname, sizeof(tname), PETSC_NULL );
1392     CHKERRQ(ierr);
1393     ierr = PCGAMGSetType( pc, tname ); CHKERRQ(ierr);
1394   }
1395   ierr = PetscOptionsTail();CHKERRQ(ierr);
1396 
1397   /* private events */
1398 #if defined PETSC_GAMG_USE_LOG
1399   if( count++ == 0 ) {
1400     PetscLogEventRegister("GAMG: createProl", PC_CLASSID, &petsc_gamg_setup_events[SET1]);
1401     PetscLogEventRegister("  Graph", PC_CLASSID, &petsc_gamg_setup_events[GRAPH]);
1402     /* PetscLogEventRegister("    G.Mat", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_MAT]); */
1403     /* PetscLogEventRegister("    G.Filter", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_FILTER]); */
1404     /* PetscLogEventRegister("    G.Square", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_SQR]); */
1405     PetscLogEventRegister("  MIS/Agg", PC_CLASSID, &petsc_gamg_setup_events[SET4]);
1406     PetscLogEventRegister("  geo: growSupp", PC_CLASSID, &petsc_gamg_setup_events[SET5]);
1407     PetscLogEventRegister("  geo: triangle", PC_CLASSID, &petsc_gamg_setup_events[SET6]);
1408     PetscLogEventRegister("    search&set", PC_CLASSID, &petsc_gamg_setup_events[FIND_V]);
1409     PetscLogEventRegister("  SA: collect data", PC_CLASSID, &petsc_gamg_setup_events[SET7]);
1410     PetscLogEventRegister("  SA: frmProl0", PC_CLASSID, &petsc_gamg_setup_events[SET8]);
1411     PetscLogEventRegister("  SA: smooth", PC_CLASSID, &petsc_gamg_setup_events[SET9]);
1412     PetscLogEventRegister("GAMG: partLevel", PC_CLASSID, &petsc_gamg_setup_events[SET2]);
1413     PetscLogEventRegister("  repartition", PC_CLASSID, &petsc_gamg_setup_events[SET12]);
1414     PetscLogEventRegister("  Invert-Sort", PC_CLASSID, &petsc_gamg_setup_events[SET13]);
1415     PetscLogEventRegister("  Move A", PC_CLASSID, &petsc_gamg_setup_events[SET14]);
1416     PetscLogEventRegister("  Move P", PC_CLASSID, &petsc_gamg_setup_events[SET15]);
1417 
1418     /* PetscLogEventRegister(" PL move data", PC_CLASSID, &petsc_gamg_setup_events[SET13]); */
1419     /* PetscLogEventRegister("GAMG: fix", PC_CLASSID, &petsc_gamg_setup_events[SET10]); */
1420     /* PetscLogEventRegister("GAMG: set levels", PC_CLASSID, &petsc_gamg_setup_events[SET11]); */
1421     /* create timer stages */
1422 #if defined GAMG_STAGES
1423     {
1424       char str[32];
1425       PetscInt lidx;
1426       sprintf(str,"MG Level %d (finest)",0);
1427       PetscLogStageRegister(str, &gamg_stages[0]);
1428       for (lidx=1;lidx<9;lidx++){
1429 	sprintf(str,"MG Level %d",lidx);
1430 	PetscLogStageRegister(str, &gamg_stages[lidx]);
1431       }
1432     }
1433 #endif
1434   }
1435 #endif
1436   /* general events */
1437 #if defined PETSC_USE_LOG
1438   PetscLogEventRegister("PCGAMGgraph_AGG", PC_CLASSID, &PC_GAMGGgraph_AGG);
1439   PetscLogEventRegister("PCGAMGgraph_GEO", PC_CLASSID, &PC_GAMGGgraph_GEO);
1440   PetscLogEventRegister("PCGAMGcoarse_AGG", PC_CLASSID, &PC_GAMGCoarsen_AGG);
1441   PetscLogEventRegister("PCGAMGcoarse_GEO", PC_CLASSID, &PC_GAMGCoarsen_GEO);
1442   PetscLogEventRegister("PCGAMGProl_AGG", PC_CLASSID, &PC_GAMGProlongator_AGG);
1443   PetscLogEventRegister("PCGAMGProl_GEO", PC_CLASSID, &PC_GAMGProlongator_GEO);
1444   PetscLogEventRegister("PCGAMGProlOpt_AGG", PC_CLASSID, &PC_GAMGOptprol_AGG);
1445 #endif
1446 
1447   PetscFunctionReturn(0);
1448 }
1449 EXTERN_C_END
1450