/*
 GAMG geometric-algebraic multigrid PC - Mark Adams 2011
 */
#include "private/matimpl.h"
#include <../src/ksp/pc/impls/gamg/gamg.h>        /*I "petscpc.h" I*/
#include <private/kspimpl.h>

#if defined PETSC_USE_LOG
PetscLogEvent gamg_setup_events[NUM_SET];
#endif
#define GAMG_MAXLEVELS 30

/* #define GAMG_STAGES */
#if (defined PETSC_USE_LOG && defined GAMG_STAGES)
static PetscLogStage gamg_stages[GAMG_MAXLEVELS];
#endif

static PetscFList GAMGList = 0;

/* ----------------------------------------------------------------------------- */
#undef __FUNCT__
#define __FUNCT__ "PCReset_GAMG"
PetscErrorCode PCReset_GAMG(PC pc)
{
  PetscErrorCode ierr;
  PC_MG          *mg = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  if( pc_gamg->data != 0 ) { /* this should not happen, cleaned up in SetUp */
    ierr = PetscFree(pc_gamg->data); CHKERRQ(ierr);
  }
  pc_gamg->data = 0; pc_gamg->data_sz = 0;
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------- */
/*
   createLevel: create coarse op with RAP.  repartition and/or reduce number
     of active processors.

   Input Parameters:
.  pc - parameters
.  Amat_fine - matrix on this fine (k) level
.  cbs - coarse block size
.  isLast - flag indicating this is the last (coarsest) level to create; forces reduction to one active process
   In/Output Parameters:
.  a_P_inout - prolongation operator to the next level (k-1)
.  a_nactive_proc - number of active procs
   Output Parameter:
.  a_Amat_crs - coarse matrix that is created (k-1)
*/
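/*
   Illustrative sketch (not part of the build): the Galerkin coarse-grid
   operator below is formed as A_{k-1} = P^T A_k P with MatPtAP.  For a
   hypothetical fine operator 'Afine' and prolongator 'P':

     Mat Acoarse;
     ierr = MatPtAP( Afine, P, MAT_INITIAL_MATRIX, 2.0, &Acoarse ); CHKERRQ(ierr);

   The scalar 2.0 is the fill ratio estimate passed to MatPtAP.
*/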
#undef __FUNCT__
#define __FUNCT__ "createLevel"
PetscErrorCode createLevel( const PC pc,
                            const Mat Amat_fine,
                            const PetscInt cbs,
                            const PetscBool isLast,
                            Mat *a_P_inout,
                            PetscMPIInt *a_nactive_proc,
                            Mat *a_Amat_crs
                            )
{
  PC_MG           *mg = (PC_MG*)pc->data;
  PC_GAMG         *pc_gamg = (PC_GAMG*)mg->innerctx;
  const PetscBool repart = pc_gamg->repart;
  const PetscInt  min_eq_proc = pc_gamg->min_eq_proc, coarse_max = pc_gamg->coarse_eq_limit;
  PetscErrorCode  ierr;
  Mat             Cmat,Pnew,Pold=*a_P_inout;
  IS              new_indices,isnum;
  MPI_Comm        wcomm = ((PetscObject)Amat_fine)->comm;
  PetscMPIInt     mype,npe,new_npe,nactive = *a_nactive_proc;
  PetscInt        neq,NN,Istart,Iend,Istart0,Iend0,ncrs_new,ncrs0,rfactor;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank( wcomm, &mype ); CHKERRQ(ierr);
  ierr = MPI_Comm_size( wcomm, &npe );  CHKERRQ(ierr);

  /* RAP */
#ifdef USE_R
  /* make R with brute force for now */
  ierr = MatTranspose( Pold, MAT_INITIAL_MATRIX, &Pnew ); CHKERRQ(ierr);
  ierr = MatDestroy( &Pold ); CHKERRQ(ierr);
  ierr = MatRARt( Amat_fine, Pnew, MAT_INITIAL_MATRIX, 2.0, &Cmat ); CHKERRQ(ierr);
  Pold = Pnew;
#else
  ierr = MatPtAP( Amat_fine, Pold, MAT_INITIAL_MATRIX, 2.0, &Cmat ); CHKERRQ(ierr);
#endif
  ierr = MatSetBlockSize( Cmat, cbs ); CHKERRQ(ierr);
  ierr = MatGetOwnershipRange( Cmat, &Istart0, &Iend0 ); CHKERRQ(ierr);
  ncrs0 = (Iend0-Istart0)/cbs; assert((Iend0-Istart0)%cbs == 0);

  /* get number of PEs to make active, reduce */
  ierr = MatGetSize( Cmat, &neq, &NN ); CHKERRQ(ierr);
  new_npe = (PetscMPIInt)((float)neq/(float)min_eq_proc + 0.5); /* hardwire min. number of eq/proc */
  if( new_npe == 0 || neq < coarse_max ) new_npe = 1;
  else if ( new_npe >= nactive ) new_npe = nactive; /* no change, rare */
  if( isLast ) new_npe = 1;

  if( !repart && new_npe==nactive ) {
    *a_Amat_crs = Cmat; /* output - no repartitioning or reduction */
  }
  else {
    /* Repartition Cmat_{k} and move columns of P^{k}_{k-1} and coordinates accordingly */
    const PetscInt *idx,ndata_rows=pc_gamg->data_cell_rows,ndata_cols=pc_gamg->data_cell_cols,data_sz=ndata_rows*ndata_cols;
    const PetscInt stride0=ncrs0*pc_gamg->data_cell_rows;
    PetscInt       *counts,is_sz,*newproc_idx,ii,jj,kk,strideNew,*tidx,inpe,targetPE;
    IS             isnewproc;
    VecScatter     vecscat;
    PetscScalar    *array;
    Vec            src_crd, dest_crd;
    IS             isscat;

    ierr = PetscMalloc( npe*sizeof(PetscInt), &counts ); CHKERRQ(ierr);
#if defined PETSC_USE_LOG
    ierr = PetscLogEventBegin(gamg_setup_events[SET12],0,0,0,0);CHKERRQ(ierr);
#endif
    if( repart ) {
      /* create sub communicator */
      Mat adj;

      if (pc_gamg->verbose) PetscPrintf(wcomm,"\t[%d]%s repartition: npe (active): %d --> %d, neq = %d\n",mype,__FUNCT__,*a_nactive_proc,new_npe,neq);

      /* MatPartitioningApply calls MatConvert, which is collective */
      if( cbs == 1 ) {
        ierr = MatConvert( Cmat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj ); CHKERRQ(ierr);
      }
      else {
        /* make a scalar matrix to partition */
        Mat tMat;
        PetscInt ncols,jj,Ii;
        const PetscScalar *vals;
        const PetscInt *idx;
        PetscInt *d_nnz, *o_nnz;
        static PetscInt llev = 0;

        ierr = PetscMalloc( ncrs0*sizeof(PetscInt), &d_nnz ); CHKERRQ(ierr);
        ierr = PetscMalloc( ncrs0*sizeof(PetscInt), &o_nnz ); CHKERRQ(ierr);
        for ( Ii = Istart0, jj = 0 ; Ii < Iend0 ; Ii += cbs, jj++ ) {
          ierr = MatGetRow(Cmat,Ii,&ncols,0,0); CHKERRQ(ierr);
          d_nnz[jj] = ncols/cbs;
          o_nnz[jj] = ncols/cbs;
          ierr = MatRestoreRow(Cmat,Ii,&ncols,0,0); CHKERRQ(ierr);
          if( d_nnz[jj] > ncrs0 ) d_nnz[jj] = ncrs0;
          if( o_nnz[jj] > (neq/cbs-ncrs0) ) o_nnz[jj] = neq/cbs-ncrs0;
        }

        ierr = MatCreateMPIAIJ( wcomm, ncrs0, ncrs0,
                                PETSC_DETERMINE, PETSC_DETERMINE,
                                0, d_nnz, 0, o_nnz,
                                &tMat );
        CHKERRQ(ierr);
        ierr = PetscFree( d_nnz ); CHKERRQ(ierr);
        ierr = PetscFree( o_nnz ); CHKERRQ(ierr);

        for ( ii = Istart0; ii < Iend0; ii++ ) {
          PetscInt dest_row = ii/cbs;
          ierr = MatGetRow(Cmat,ii,&ncols,&idx,&vals); CHKERRQ(ierr);
          for( jj = 0 ; jj < ncols ; jj++ ){
            PetscInt dest_col = idx[jj]/cbs;
            PetscScalar v = 1.0;
            ierr = MatSetValues(tMat,1,&dest_row,1,&dest_col,&v,ADD_VALUES); CHKERRQ(ierr);
          }
          ierr = MatRestoreRow(Cmat,ii,&ncols,&idx,&vals); CHKERRQ(ierr);
        }
        ierr = MatAssemblyBegin(tMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
        ierr = MatAssemblyEnd(tMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

        if( llev++ == -1 ) { /* debug: dump the partitioning matrix (disabled) */
          PetscViewer viewer; char fname[32];
          sprintf(fname,"part_mat_%d.mat",llev);
          PetscViewerBinaryOpen(wcomm,fname,FILE_MODE_WRITE,&viewer);
          ierr = MatView( tMat, viewer ); CHKERRQ(ierr);
          ierr = PetscViewerDestroy( &viewer ); CHKERRQ(ierr);
        }

        ierr = MatConvert( tMat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj ); CHKERRQ(ierr);

        ierr = MatDestroy( &tMat ); CHKERRQ(ierr);
      }

      { /* partition: get newproc_idx, set is_sz */
        char prefix[256];
        const char *pcpre;
        const PetscInt *is_idx;
        MatPartitioning mpart;
        IS proc_is;

        ierr = MatPartitioningCreate( wcomm, &mpart ); CHKERRQ(ierr);
        ierr = MatPartitioningSetAdjacency( mpart, adj ); CHKERRQ(ierr);
        ierr = PCGetOptionsPrefix( pc, &pcpre );CHKERRQ(ierr);
        ierr = PetscSNPrintf(prefix,sizeof prefix,"%spc_gamg_",pcpre?pcpre:"");CHKERRQ(ierr);
        ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
        ierr = MatPartitioningSetFromOptions( mpart ); CHKERRQ(ierr);
        ierr = MatPartitioningSetNParts( mpart, new_npe );CHKERRQ(ierr);
        ierr = MatPartitioningApply( mpart, &proc_is ); CHKERRQ(ierr);
        ierr = MatPartitioningDestroy( &mpart ); CHKERRQ(ierr);

        /* collect IS info */
        ierr = ISGetLocalSize( proc_is, &is_sz ); CHKERRQ(ierr);
        ierr = PetscMalloc( cbs*is_sz*sizeof(PetscInt), &newproc_idx ); CHKERRQ(ierr);
        ierr = ISGetIndices( proc_is, &is_idx ); CHKERRQ(ierr);
        NN = 1; /* bring to "front" of machine */
        /*NN = npe/new_npe;*/ /* spread partitioning across machine */
        for( kk = jj = 0 ; kk < is_sz ; kk++ ){
          for( ii = 0 ; ii < cbs ; ii++, jj++ ){
            newproc_idx[jj] = is_idx[kk] * NN; /* distribution */
          }
        }
        ierr = ISRestoreIndices( proc_is, &is_idx ); CHKERRQ(ierr);
        ierr = ISDestroy( &proc_is ); CHKERRQ(ierr);

        is_sz *= cbs;
      }
      ierr = MatDestroy( &adj ); CHKERRQ(ierr);

      ierr = ISCreateGeneral( wcomm, is_sz, newproc_idx, PETSC_COPY_VALUES, &isnewproc );
      CHKERRQ(ierr);
      if( newproc_idx != 0 ) {
        ierr = PetscFree( newproc_idx ); CHKERRQ(ierr);
      }
    }
    else { /* simple aggregation of parts */
      /* find factor */
      if( new_npe == 1 ) rfactor = npe; /* easy */
      else {
        PetscReal best_fact = 0.;
        jj = -1;
        for( kk = 1 ; kk <= npe ; kk++ ){
          if( npe%kk==0 ) { /* a candidate */
            PetscReal nactpe = (PetscReal)npe/(PetscReal)kk, fact = nactpe/(PetscReal)new_npe;
            if(fact > 1.0) fact = 1./fact; /* keep fact < 1 */
            if( fact > best_fact ) {
              best_fact = fact; jj = kk;
            }
          }
        }
        if(jj!=-1) rfactor = jj;
        else rfactor = 1; /* prime? */
      }
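      /* worked example of the factor search above (illustrative): with npe=16
         and a target new_npe=5, the divisors kk=1,2,4,8,16 give active counts
         16,8,4,2,1 and fact=5/16,5/8,4/5,2/5,1/5 respectively; kk=4 has the
         best (largest) fact, so rfactor=4 and new_npe becomes 16/4=4 */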
      new_npe = npe/rfactor;

      if( new_npe==nactive ) {
        *a_Amat_crs = Cmat; /* output - no repartitioning or reduction */
        ierr = PetscFree( counts ); CHKERRQ(ierr);
        *a_nactive_proc = new_npe; /* output */
        if (pc_gamg->verbose) PetscPrintf(wcomm,"\t[%d]%s aggregate processors noop: new_npe=%d, neq=%d\n",mype,__FUNCT__,new_npe,neq);
        PetscFunctionReturn(0);
      }

      /* reduce - isnewproc */
      targetPE = mype/rfactor;

      if (pc_gamg->verbose) PetscPrintf(wcomm,"\t[%d]%s aggregate processors: npe: %d --> %d, neq=%d\n",mype,__FUNCT__,*a_nactive_proc,new_npe,neq);
      is_sz = ncrs0*cbs;
      ierr = ISCreateStride( wcomm, is_sz, targetPE, 0, &isnewproc );
      CHKERRQ(ierr);
    }

    /*
      Create an index set from the isnewproc index set to indicate the mapping TO
    */
    ierr = ISPartitioningToNumbering( isnewproc, &isnum ); CHKERRQ(ierr);
    /*
      Determine how many elements are assigned to each processor
    */
    inpe = npe;
    ierr = ISPartitioningCount( isnewproc, inpe, counts ); CHKERRQ(ierr);
    ierr = ISDestroy( &isnewproc ); CHKERRQ(ierr);
    ncrs_new = counts[mype]/cbs;
    strideNew = ncrs_new*ndata_rows;
#if defined PETSC_USE_LOG
    ierr = PetscLogEventEnd(gamg_setup_events[SET12],0,0,0,0); CHKERRQ(ierr);
#endif
    /* Create a vector to contain the newly ordered element information */
    ierr = VecCreate( wcomm, &dest_crd ); CHKERRQ(ierr);
    ierr = VecSetSizes( dest_crd, data_sz*ncrs_new, PETSC_DECIDE ); CHKERRQ(ierr);
    ierr = VecSetFromOptions( dest_crd ); CHKERRQ(ierr); /* this is needed! */
    /*
      There are 'ndata_rows*ndata_cols' data items per node; one can think of
      these vectors as having a block size of 'data_sz'.  Note, ISs are
      expanded into equation space by 'cbs'.
    */
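    /* layout example (illustrative): with ndata_rows=3 and ndata_cols=2, node
       ii keeps its (kk,jj) entry at pc_gamg->data[ii*ndata_rows + kk + jj*stride0],
       so one node's data rows are contiguous and successive data columns are
       'stride0' entries apart */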
    ierr = PetscMalloc( (ncrs0*data_sz)*sizeof(PetscInt), &tidx ); CHKERRQ(ierr);
    ierr = ISGetIndices( isnum, &idx ); CHKERRQ(ierr);
    for(ii=0,jj=0; ii<ncrs0 ; ii++) {
      PetscInt id = idx[ii*cbs]/cbs; /* get node back */
      for( kk=0; kk<data_sz ; kk++, jj++) tidx[jj] = id*data_sz + kk;
    }
    ierr = ISRestoreIndices( isnum, &idx ); CHKERRQ(ierr);
    ierr = ISCreateGeneral( wcomm, data_sz*ncrs0, tidx, PETSC_COPY_VALUES, &isscat );
    CHKERRQ(ierr);
    ierr = PetscFree( tidx ); CHKERRQ(ierr);
    /*
      Create a vector to contain the original vertex information for each element
    */
    ierr = VecCreateSeq( PETSC_COMM_SELF, data_sz*ncrs0, &src_crd ); CHKERRQ(ierr);
    for( jj=0; jj<ndata_cols ; jj++ ) {
      for( ii=0 ; ii<ncrs0 ; ii++) {
        for( kk=0; kk<ndata_rows ; kk++ ) {
          PetscInt ix = ii*ndata_rows + kk + jj*stride0, jx = ii*data_sz + kk*ndata_cols + jj;
          PetscScalar tt = (PetscScalar)pc_gamg->data[ix];
          ierr = VecSetValues( src_crd, 1, &jx, &tt, INSERT_VALUES ); CHKERRQ(ierr);
        }
      }
    }
    ierr = VecAssemblyBegin(src_crd); CHKERRQ(ierr);
    ierr = VecAssemblyEnd(src_crd); CHKERRQ(ierr);
    /*
      Scatter the element vertex information (still in the original vertex ordering)
      to the correct processor
    */
    ierr = VecScatterCreate( src_crd, PETSC_NULL, dest_crd, isscat, &vecscat);
    CHKERRQ(ierr);
    ierr = ISDestroy( &isscat ); CHKERRQ(ierr);
    ierr = VecScatterBegin(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterDestroy( &vecscat ); CHKERRQ(ierr);
    ierr = VecDestroy( &src_crd ); CHKERRQ(ierr);
    /*
      Put the element vertex data into a new allocation of the gdata->ele
    */
    ierr = PetscFree( pc_gamg->data ); CHKERRQ(ierr);
    ierr = PetscMalloc( data_sz*ncrs_new*sizeof(PetscReal), &pc_gamg->data ); CHKERRQ(ierr);
    pc_gamg->data_sz = data_sz*ncrs_new;
    ierr = VecGetArray( dest_crd, &array ); CHKERRQ(ierr);
    for( jj=0; jj<ndata_cols ; jj++ ) {
      for( ii=0 ; ii<ncrs_new ; ii++) {
        for( kk=0; kk<ndata_rows ; kk++ ) {
          PetscInt ix = ii*ndata_rows + kk + jj*strideNew, jx = ii*data_sz + kk*ndata_cols + jj;
          pc_gamg->data[ix] = PetscRealPart(array[jx]);
          array[jx] = 1.e300; /* poison the consumed entry to catch stale reads */
        }
      }
    }
    ierr = VecRestoreArray( dest_crd, &array ); CHKERRQ(ierr);
    ierr = VecDestroy( &dest_crd ); CHKERRQ(ierr);
#if defined PETSC_USE_LOG
    ierr = PetscLogEventBegin(gamg_setup_events[SET13],0,0,0,0);CHKERRQ(ierr);
#endif
    /*
      Invert for MatGetSubMatrix
    */
    ierr = ISInvertPermutation( isnum, ncrs_new*cbs, &new_indices ); CHKERRQ(ierr);
    ierr = ISSort( new_indices ); CHKERRQ(ierr); /* is this needed? */
    ierr = ISDestroy( &isnum ); CHKERRQ(ierr);
#if defined PETSC_USE_LOG
    ierr = PetscLogEventEnd(gamg_setup_events[SET13],0,0,0,0);CHKERRQ(ierr);
    ierr = PetscLogEventBegin(gamg_setup_events[SET14],0,0,0,0);CHKERRQ(ierr);
#endif
    /* A_crs output */
    ierr = MatGetSubMatrix( Cmat, new_indices, new_indices, MAT_INITIAL_MATRIX, a_Amat_crs );
    CHKERRQ(ierr);

    ierr = MatDestroy( &Cmat ); CHKERRQ(ierr);
    Cmat = *a_Amat_crs; /* output */
    ierr = MatSetBlockSize( Cmat, cbs ); CHKERRQ(ierr);
#if defined PETSC_USE_LOG
    ierr = PetscLogEventEnd(gamg_setup_events[SET14],0,0,0,0);CHKERRQ(ierr);
#endif
    /* prolongator */
    ierr = MatGetOwnershipRange( Pold, &Istart, &Iend ); CHKERRQ(ierr);
    {
      IS findices;
#if defined PETSC_USE_LOG
      ierr = PetscLogEventBegin(gamg_setup_events[SET15],0,0,0,0);CHKERRQ(ierr);
#endif
      ierr = ISCreateStride(wcomm,Iend-Istart,Istart,1,&findices); CHKERRQ(ierr);
#ifdef USE_R
      ierr = MatGetSubMatrix( Pold, new_indices, findices, MAT_INITIAL_MATRIX, &Pnew );
#else
      ierr = MatGetSubMatrix( Pold, findices, new_indices, MAT_INITIAL_MATRIX, &Pnew );
#endif
      CHKERRQ(ierr);
      ierr = ISDestroy( &findices ); CHKERRQ(ierr);
#if defined PETSC_USE_LOG
      ierr = PetscLogEventEnd(gamg_setup_events[SET15],0,0,0,0);CHKERRQ(ierr);
#endif
    }
    ierr = MatDestroy( a_P_inout ); CHKERRQ(ierr);
    *a_P_inout = Pnew; /* output - repartitioned */
    ierr = ISDestroy( &new_indices ); CHKERRQ(ierr);
    ierr = PetscFree( counts ); CHKERRQ(ierr);
  }

  *a_nactive_proc = new_npe; /* output */

  if( !PETSC_TRUE ) { /* debug: dump fine and coarse operators in MATLAB format (disabled) */
    PetscViewer viewer; char fname[32]; static int llev=0; Cmat = *a_Amat_crs;
    if(llev==0) {
      sprintf(fname,"Cmat_%d.m",llev++);
      PetscViewerASCIIOpen(wcomm,fname,&viewer);
      ierr = PetscViewerSetFormat( viewer, PETSC_VIEWER_ASCII_MATLAB); CHKERRQ(ierr);
      ierr = MatView(Amat_fine, viewer ); CHKERRQ(ierr);
      ierr = PetscViewerDestroy( &viewer ); CHKERRQ(ierr);
    }
    sprintf(fname,"Cmat_%d.m",llev++);
    PetscViewerASCIIOpen(wcomm,fname,&viewer);
    ierr = PetscViewerSetFormat( viewer, PETSC_VIEWER_ASCII_MATLAB); CHKERRQ(ierr);
    ierr = MatView(Cmat, viewer ); CHKERRQ(ierr);
    ierr = PetscViewerDestroy( &viewer ); CHKERRQ(ierr);
  }

  PetscFunctionReturn(0);
}
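/*
   createLevel() is driven by the level loop in PCSetUp_GAMG() below, i.e.

     ierr = createLevel( pc, Aarr[level], bs,
                         (PetscBool)(level==pc_gamg->Nlevels-2),
                         &Parr[level1], &nactivepe, &Aarr[level1] );
     CHKERRQ(ierr);
*/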
/* -------------------------------------------------------------------------- */
/*
   PCSetUp_GAMG - Prepares for the use of the GAMG preconditioner
                  by setting data structures and options.

   Input Parameter:
.  pc - the preconditioner context

   Application Interface Routine: PCSetUp()

   Notes:
   The interface routine PCSetUp() is not usually called directly by
   the user, but instead is called by PCApply() if necessary.
*/
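/*
   Usage sketch (illustrative): this routine is reached through the normal
   KSP/PC setup path, e.g.

     ierr = KSPCreate( comm, &ksp );CHKERRQ(ierr);
     ierr = KSPSetOperators( ksp, Amat, Amat, SAME_NONZERO_PATTERN );CHKERRQ(ierr);
     ierr = KSPGetPC( ksp, &pc );CHKERRQ(ierr);
     ierr = PCSetType( pc, PCGAMG );CHKERRQ(ierr);
     ierr = KSPSetUp( ksp );CHKERRQ(ierr);  /* calls PCSetUp() -> PCSetUp_GAMG() */

   'comm' and 'Amat' are assumed to be the user's communicator and assembled
   fine-grid matrix.
*/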
#undef __FUNCT__
#define __FUNCT__ "PCSetUp_GAMG"
PetscErrorCode PCSetUp_GAMG( PC pc )
{
  PetscErrorCode ierr;
  PC_MG          *mg = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
  Mat            Pmat = pc->pmat;
  PetscInt       fine_level,level,level1,M,N,bs,nloc,lidx,Istart,Iend;
  MPI_Comm       wcomm = ((PetscObject)pc)->comm;
  PetscMPIInt    mype,npe,nactivepe;
  Mat            Aarr[GAMG_MAXLEVELS], Parr[GAMG_MAXLEVELS];
  PetscReal      emaxs[GAMG_MAXLEVELS];
  PetscLogDouble nnz0=0,nnztot=0;
  MatInfo        info;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(wcomm,&mype);CHKERRQ(ierr);
  ierr = MPI_Comm_size(wcomm,&npe);CHKERRQ(ierr);
  if( pc_gamg->setup_count++ > 0 ) {
    PC_MG_Levels **mglevels = mg->levels;
    /* just do Galerkin grids */
    Mat B,dA,dB;
    assert(pc->setupcalled);

    if( pc_gamg->Nlevels > 1 ) {
      /* currently only handle case where mat and pmat are the same on coarser levels */
      ierr = KSPGetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,&dA,&dB,PETSC_NULL);CHKERRQ(ierr);
      /* (re)set to get dirty flag */
      ierr = KSPSetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,dA,dB,SAME_NONZERO_PATTERN);CHKERRQ(ierr);

      for (level=pc_gamg->Nlevels-2; level>-1; level--) {
        /* the first time through the matrix structure has changed from repartitioning */
        if( pc_gamg->setup_count==2 /*&& (pc_gamg->repart || level==0)*/) {
          ierr = MatPtAP(dB,mglevels[level+1]->interpolate,MAT_INITIAL_MATRIX,1.0,&B);CHKERRQ(ierr);
          ierr = MatDestroy(&mglevels[level]->A);CHKERRQ(ierr);
          mglevels[level]->A = B;
        }
        else {
          ierr = KSPGetOperators(mglevels[level]->smoothd,PETSC_NULL,&B,PETSC_NULL);CHKERRQ(ierr);
          ierr = MatPtAP(dB,mglevels[level+1]->interpolate,MAT_REUSE_MATRIX,1.0,&B);CHKERRQ(ierr);
        }
        ierr = KSPSetOperators(mglevels[level]->smoothd,B,B,SAME_NONZERO_PATTERN); CHKERRQ(ierr);
        dB = B;
      }
    }

    ierr = PCSetUp_MG( pc );CHKERRQ( ierr );

    /* PCSetUp_MG seems to insist on setting this to GMRES */
    ierr = KSPSetType( mglevels[0]->smoothd, KSPPREONLY ); CHKERRQ(ierr);

    PetscFunctionReturn(0);
  }
  assert(pc->setupcalled == 0);

  /* GAMG requires input of fine-grid matrix. It determines nlevels. */
  ierr = MatGetBlockSize( Pmat, &bs ); CHKERRQ(ierr);
  ierr = MatGetSize( Pmat, &M, &N );CHKERRQ(ierr);
  ierr = MatGetOwnershipRange( Pmat, &Istart, &Iend ); CHKERRQ(ierr);
  nloc = (Iend-Istart)/bs; assert((Iend-Istart)%bs == 0);

  if( pc_gamg->data == 0 && nloc > 0 ) {
    if(!pc_gamg->createdefaultdata){
      SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_LIB,"'createdefaultdata' not set?!?! need to support NULL data!!!");
    }
    ierr = pc_gamg->createdefaultdata( pc ); CHKERRQ(ierr);
  }

  /* Get A_i and R_i */
  if (pc_gamg->verbose) {
    if(pc_gamg->verbose==1) ierr = MatGetInfo(Pmat,MAT_LOCAL,&info);
    else ierr = MatGetInfo(Pmat,MAT_GLOBAL_SUM,&info);
    CHKERRQ(ierr);
    nnz0 = info.nz_used;
    nnztot = info.nz_used;
    PetscPrintf(wcomm,"\t[%d]%s level %d N=%d, n data rows=%d, n data cols=%d, nnz/row (ave)=%d, np=%d\n",
                mype,__FUNCT__,0,N,pc_gamg->data_cell_rows,pc_gamg->data_cell_cols,
                (int)(nnz0/(PetscReal)N),npe);
  }

  for ( level=0, Aarr[0] = Pmat, nactivepe = npe; /* hard wired stopping logic */
        level < (pc_gamg->Nlevels-1) && (level==0 || M>pc_gamg->coarse_eq_limit); /* && (npe==1 || nactivepe>1); */
        level++ ){
    level1 = level + 1;
#if (defined PETSC_USE_LOG && defined GAMG_STAGES)
    ierr = PetscLogStagePush(gamg_stages[level]); CHKERRQ( ierr );
#endif
#if defined PETSC_USE_LOG
    ierr = PetscLogEventBegin(gamg_setup_events[SET1],0,0,0,0); CHKERRQ(ierr);
#endif
    { /* construct prolongator */
      Mat Gmat;
      IS  selected, llist;
      assert(pc_gamg->graph);
      assert(pc_gamg->coarsen);

      ierr = pc_gamg->graph( pc, Aarr[level], &Gmat ); CHKERRQ(ierr);
      ierr = pc_gamg->coarsen( pc, Gmat, &selected, &llist ); CHKERRQ(ierr);
      ierr = pc_gamg->prolongator( pc, Aarr[level], Gmat, selected, llist, &Parr[level1] );
      CHKERRQ(ierr);

      if( Parr[level1] ){
        /* get new block size of coarse matrices */
        if( pc_gamg->col_bs_id != -1 ){
          PetscBool flag;
          ierr = PetscObjectComposedDataGetInt( (PetscObject)Parr[level1], pc_gamg->col_bs_id, bs, flag );
          CHKERRQ( ierr ); assert(flag);
        }
      }

      if( pc_gamg->optprol && Parr[level1] ){
        /* smooth */
        ierr = pc_gamg->optprol( pc, Aarr[level], &Parr[level1] ); CHKERRQ(ierr);
      }

      ierr = MatDestroy( &Gmat ); CHKERRQ(ierr);
      ierr = ISDestroy( &selected ); CHKERRQ(ierr);
      ierr = ISDestroy( &llist ); CHKERRQ(ierr);
    }
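    /* note: graph(), coarsen(), prolongator() and optprol() used above are
       method-table hooks; they are filled in by the derived type creation
       routines (PCCreateGAMG_GEO / PCCreateGAMG_AGG, registered in
       PCCreate_GAMG below), which makes the coarsening kernel pluggable */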
#if defined PETSC_USE_LOG
    ierr = PetscLogEventEnd(gamg_setup_events[SET1],0,0,0,0);CHKERRQ(ierr);
#endif
    /* cache eigen estimate */
    if( pc_gamg->emax_id != -1 ){
      PetscBool flag;
      ierr = PetscObjectComposedDataGetReal( (PetscObject)Aarr[level], pc_gamg->emax_id, emaxs[level], flag );
      CHKERRQ( ierr );
      if( !flag ) emaxs[level] = -1.;
    }
    else emaxs[level] = -1.;
    if(level==0) Aarr[0] = Pmat; /* use Pmat for finest level setup */
    if( !Parr[level1] ) {
      if (pc_gamg->verbose) PetscPrintf(wcomm,"\t[%d]%s stop gridding, level %d\n",mype,__FUNCT__,level);
      break;
    }
#if defined PETSC_USE_LOG
    ierr = PetscLogEventBegin(gamg_setup_events[SET2],0,0,0,0);CHKERRQ(ierr);
#endif
    ierr = createLevel( pc, Aarr[level], bs,
                        (PetscBool)(level==pc_gamg->Nlevels-2),
                        &Parr[level1], &nactivepe, &Aarr[level1] );
    CHKERRQ(ierr);
#if defined PETSC_USE_LOG
    ierr = PetscLogEventEnd(gamg_setup_events[SET2],0,0,0,0);CHKERRQ(ierr);
#endif
    ierr = MatGetSize( Aarr[level1], &M, &N );CHKERRQ(ierr);
    if (pc_gamg->verbose){
      ierr = MatGetInfo( Aarr[level1], MAT_GLOBAL_SUM, &info ); CHKERRQ(ierr);
      PetscPrintf(wcomm,"\t\t[%d]%s %d) N=%d, n data cols=%d, nnz/row (ave)=%d, %d active pes\n",
                  mype,__FUNCT__,(int)level1,N,pc_gamg->data_cell_cols,
                  (int)(info.nz_used/(PetscReal)N),nactivepe);
    }
    /* stop if one node */
    if( M/pc_gamg->data_cell_cols < 2 ) {
      level++;
      break;
    }
    /* Vec diag; PetscScalar *data_arr,v; PetscInt Istart,Iend,kk,nloceq,id; */
    /* v = 1.e-10; /\* LU factor has hard wired numbers for small diags so this needs to match (yuk) *\/ */
    /* ierr = MatGetOwnershipRange(Aarr[level1], &Istart, &Iend); CHKERRQ(ierr); */
    /* nloceq = Iend-Istart; */
    /* ierr = MatGetVecs( Aarr[level1], &diag, 0 ); CHKERRQ(ierr); */
    /* ierr = MatGetDiagonal( Aarr[level1], diag ); CHKERRQ(ierr); */
    /* ierr = VecGetArray( diag, &data_arr ); CHKERRQ(ierr); */
    /* for(kk=0;kk<nloceq;kk++){ */
    /*   if(data_arr[kk]==0.0) { */
    /*     id = kk + Istart; */
    /*     ierr = MatSetValues(Aarr[level1],1,&id,1,&id,&v,INSERT_VALUES); */
    /*     CHKERRQ(ierr); */
    /*     PetscPrintf(PETSC_COMM_SELF,"\t[%d]%s warning: added zero to diag (%d) on level %d \n",mype,__FUNCT__,id,level1); */
    /*   } */
    /* } */
    /* ierr = VecRestoreArray( diag, &data_arr ); CHKERRQ(ierr); */
    /* ierr = VecDestroy( &diag ); CHKERRQ(ierr); */
    /* ierr = MatAssemblyBegin(Aarr[level1],MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); */
    /* ierr = MatAssemblyEnd(Aarr[level1],MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); */

#if (defined PETSC_USE_LOG && defined GAMG_STAGES)
    ierr = PetscLogStagePop(); CHKERRQ( ierr );
#endif
    if(pc_gamg->verbose){
      if(pc_gamg->verbose==1) ierr = MatGetInfo(Aarr[level1],MAT_LOCAL,&info);
      else ierr = MatGetInfo(Aarr[level1],MAT_GLOBAL_SUM,&info);
      CHKERRQ(ierr);
      nnztot += info.nz_used;
    }
  } /* levels */

  if( pc_gamg->data ) {
    ierr = PetscFree( pc_gamg->data ); CHKERRQ( ierr );
    pc_gamg->data = 0;
  }

  if (pc_gamg->verbose) PetscPrintf(wcomm,"\t[%d]%s %d levels, grid complexity = %g\n",0,__FUNCT__,level+1,nnztot/nnz0);
  pc_gamg->Nlevels = level + 1;
  fine_level = level;
  ierr = PCMGSetLevels(pc,pc_gamg->Nlevels,PETSC_NULL);CHKERRQ(ierr);

  /* simple setup */
  if( !PETSC_TRUE ){ /* debug: alternate minimal MG setup path (disabled) */
    PC_MG_Levels **mglevels = mg->levels;
    for (lidx=0,level=pc_gamg->Nlevels-1;
         lidx<fine_level;
         lidx++, level--){
      ierr = PCMGSetInterpolation( pc, lidx+1, Parr[level] );CHKERRQ(ierr);
      ierr = KSPSetOperators( mglevels[lidx]->smoothd, Aarr[level], Aarr[level], SAME_NONZERO_PATTERN ); CHKERRQ(ierr);
      ierr = MatDestroy( &Parr[level] ); CHKERRQ(ierr);
      ierr = MatDestroy( &Aarr[level] ); CHKERRQ(ierr);
    }
    ierr = KSPSetOperators( mglevels[fine_level]->smoothd, Aarr[0], Aarr[0], SAME_NONZERO_PATTERN ); CHKERRQ(ierr);

    ierr = PCSetUp_MG( pc ); CHKERRQ( ierr );
  }
  else if( pc_gamg->Nlevels > 1 ) { /* don't setup MG if one level */
    /* set default smoothers & set operators */
    for ( lidx = 1, level = pc_gamg->Nlevels-2;
          lidx <= fine_level;
          lidx++, level--) {
      KSP smoother; PC subpc;
      /* set defaults */
      ierr = PCMGGetSmoother( pc, lidx, &smoother ); CHKERRQ(ierr);
      ierr = KSPSetType( smoother, KSPCHEBYCHEV );CHKERRQ(ierr);
      ierr = KSPGetPC( smoother, &subpc ); CHKERRQ(ierr);
      ierr = PCSetType( subpc, PCJACOBI ); CHKERRQ(ierr);
      ierr = KSPSetNormType( smoother, KSP_NORM_NONE ); CHKERRQ(ierr);
      /* set ops */
      ierr = KSPSetOperators( smoother, Aarr[level], Aarr[level], SAME_NONZERO_PATTERN ); CHKERRQ(ierr);
      ierr = PCMGSetInterpolation( pc, lidx, Parr[level+1] );CHKERRQ(ierr);
    }
    {
      /* coarse grid */
      KSP smoother,*k2; PC subpc,pc2; PetscInt ii,first;
      Mat Lmat = Aarr[(level=pc_gamg->Nlevels-1)]; lidx = 0;
      ierr = PCMGGetSmoother( pc, lidx, &smoother ); CHKERRQ(ierr);
      ierr = KSPSetOperators( smoother, Lmat, Lmat, SAME_NONZERO_PATTERN ); CHKERRQ(ierr);
      ierr = KSPSetNormType( smoother, KSP_NORM_NONE ); CHKERRQ(ierr);
      ierr = KSPGetPC( smoother, &subpc ); CHKERRQ(ierr);
      ierr = PCSetType( subpc, PCBJACOBI ); CHKERRQ(ierr);
      ierr = PCSetUp( subpc ); CHKERRQ(ierr);
      ierr = PCBJacobiGetSubKSP(subpc,&ii,&first,&k2);CHKERRQ(ierr); assert(ii==1);
      ierr = KSPGetPC(k2[0],&pc2);CHKERRQ(ierr);
      ierr = PCSetType( pc2, PCLU ); CHKERRQ(ierr);
    }

    /* should be called in PCSetFromOptions_GAMG(), but cannot be called prior to PCMGSetLevels() */
    ierr = PetscObjectOptionsBegin((PetscObject)pc);CHKERRQ(ierr);
    ierr = PCSetFromOptions_MG(pc); CHKERRQ(ierr);
    ierr = PetscOptionsEnd();CHKERRQ(ierr);
    if (mg->galerkin != 2) SETERRQ(wcomm,PETSC_ERR_USER,"GAMG does Galerkin manually so the -pc_mg_galerkin option must not be used.");

    /* create cheby smoothers */
    for ( lidx = 1, level = pc_gamg->Nlevels-2;
          lidx <= fine_level;
          lidx++, level--) {
      KSP smoother;
      PetscBool isCheb;

      ierr = PCMGGetSmoother( pc, lidx, &smoother ); CHKERRQ(ierr);
      /* do my own cheby */
      ierr = PetscTypeCompare( (PetscObject)smoother, KSPCHEBYCHEV, &isCheb ); CHKERRQ(ierr);
      if( isCheb ) {
        PetscReal emax, emin;
        PC subpc;

        ierr = KSPGetPC( smoother, &subpc ); CHKERRQ(ierr);
        ierr = PetscTypeCompare( (PetscObject)subpc, PCJACOBI, &isCheb ); CHKERRQ(ierr);
        if( isCheb && emaxs[level] > 0.0 ) emax=emaxs[level]; /* eigen estimate only for diagonal PC */
        else { /* eigen estimate 'emax' */
          KSP eksp; Mat Lmat = Aarr[level];
          Vec bb, xx;

          ierr = MatGetVecs( Lmat, &bb, 0 ); CHKERRQ(ierr);
          ierr = MatGetVecs( Lmat, &xx, 0 ); CHKERRQ(ierr);
          {
            PetscRandom rctx;
            ierr = PetscRandomCreate(wcomm,&rctx);CHKERRQ(ierr);
            ierr = PetscRandomSetFromOptions(rctx);CHKERRQ(ierr);
            ierr = VecSetRandom(bb,rctx);CHKERRQ(ierr);
            ierr = PetscRandomDestroy( &rctx ); CHKERRQ(ierr);
          }
          ierr = KSPCreate(wcomm,&eksp);CHKERRQ(ierr);
          ierr = KSPSetTolerances( eksp, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT, 10 );
          CHKERRQ(ierr);
          ierr = KSPSetNormType( eksp, KSP_NORM_NONE ); CHKERRQ(ierr);

          ierr = KSPAppendOptionsPrefix( eksp, "est_"); CHKERRQ(ierr);
          ierr = KSPSetFromOptions( eksp ); CHKERRQ(ierr);

          ierr = KSPSetInitialGuessNonzero( eksp, PETSC_FALSE ); CHKERRQ(ierr);
          ierr = KSPSetOperators( eksp, Lmat, Lmat, SAME_NONZERO_PATTERN ); CHKERRQ( ierr );
          ierr = KSPSetComputeSingularValues( eksp,PETSC_TRUE ); CHKERRQ(ierr);

          { /* set PC type to be same as smoother - does not get all parameters!!! */
            const PCType type; PC pc;
            ierr = PCGetType( subpc, &type ); CHKERRQ(ierr);
            ierr = KSPGetPC( eksp, &pc );CHKERRQ( ierr );
            ierr = PCSetType( pc, type ); CHKERRQ(ierr);
          }

          /* solve - keep stuff out of logging */
          ierr = PetscLogEventDeactivate(KSP_Solve);CHKERRQ(ierr);
          ierr = PetscLogEventDeactivate(PC_Apply);CHKERRQ(ierr);
          ierr = KSPSolve( eksp, bb, xx ); CHKERRQ(ierr);
          ierr = PetscLogEventActivate(KSP_Solve);CHKERRQ(ierr);
          ierr = PetscLogEventActivate(PC_Apply);CHKERRQ(ierr);

          ierr = KSPComputeExtremeSingularValues( eksp, &emax, &emin ); CHKERRQ(ierr);

          ierr = VecDestroy( &xx ); CHKERRQ(ierr);
          ierr = VecDestroy( &bb ); CHKERRQ(ierr);
          ierr = KSPDestroy( &eksp ); CHKERRQ(ierr);

          if (pc_gamg->verbose) {
            PetscPrintf(wcomm,"\t\t\t%s PC setup max eigen=%e min=%e\n",__FUNCT__,emax,emin);
          }
        }
        {
          PetscInt N1, N0, tt;
          ierr = MatGetSize( Aarr[level], &N1, &tt ); CHKERRQ(ierr);
          ierr = MatGetSize( Aarr[level+1], &N0, &tt ); CHKERRQ(ierr);
          /* heuristic - is this crap? */
          emin = 1.*emax/((PetscReal)N1/(PetscReal)N0);
          emax *= 1.05;
        }
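        /* numeric example of the heuristic above (illustrative): emax=4.0 and
           a coarsening ratio N1/N0=8 give emin=0.5 and emax=4.2, so Chebyshev
           is set up to target the interval [0.5, 4.2] */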
        ierr = KSPChebychevSetEigenvalues( smoother, emax, emin );CHKERRQ(ierr);
      }
    }

    /* clean up */
    for(level=1;level<pc_gamg->Nlevels;level++){
      ierr = MatDestroy( &Parr[level] ); CHKERRQ(ierr);
      ierr = MatDestroy( &Aarr[level] ); CHKERRQ(ierr);
    }
    {
      KSP smoother; /* PCSetUp_MG seems to insist on setting this to GMRES on coarse grid */
      ierr = PCMGGetSmoother( pc, 0, &smoother ); CHKERRQ(ierr);
      ierr = KSPSetType( smoother, KSPPREONLY ); CHKERRQ(ierr);
    }
    ierr = PCSetUp_MG( pc );CHKERRQ( ierr );
  }
  else {
    KSP smoother;
    if (pc_gamg->verbose) PetscPrintf(wcomm,"\t[%d]%s one level solver used (system is seen as DD). Using default solver.\n",mype,__FUNCT__);
    ierr = PCMGGetSmoother( pc, 0, &smoother ); CHKERRQ(ierr);
    ierr = KSPSetOperators( smoother, Aarr[0], Aarr[0], SAME_NONZERO_PATTERN ); CHKERRQ(ierr);
    ierr = KSPSetType( smoother, KSPPREONLY ); CHKERRQ(ierr);
    ierr = PCSetUp_MG( pc );CHKERRQ( ierr );
  }

  PetscFunctionReturn(0);
}

/* ------------------------------------------------------------------------- */
/*
   PCDestroy_GAMG - Destroys the private context for the GAMG preconditioner
   that was created with PCCreate_GAMG().

   Input Parameter:
.  pc - the preconditioner context

   Application Interface Routine: PCDestroy()
*/
#undef __FUNCT__
#define __FUNCT__ "PCDestroy_GAMG"
PetscErrorCode PCDestroy_GAMG( PC pc )
{
  PetscErrorCode ierr;
  PC_MG          *mg = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg= (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  ierr = PCReset_GAMG( pc );CHKERRQ(ierr);
  ierr = PetscFree( pc_gamg );CHKERRQ(ierr);
  ierr = PCDestroy_MG( pc );CHKERRQ(ierr);
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetProcEqLim"
/*@
   PCGAMGSetProcEqLim - Set number of equations to aim for on coarse grids via
   processor reduction.

   Not Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - the number of equations to aim for per process

   Options Database Key:
.  -pc_gamg_process_eq_limit

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetProcEqLim(PC pc, PetscInt n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetProcEqLim_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetProcEqLim_GAMG"
PetscErrorCode PCGAMGSetProcEqLim_GAMG(PC pc, PetscInt n)
{
  PC_MG   *mg = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  if(n>0) pc_gamg->min_eq_proc = n;
  PetscFunctionReturn(0);
}
EXTERN_C_END
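/*
   Usage sketch (illustrative, assuming 'pc' is a PC of type PCGAMG):

     ierr = PCGAMGSetProcEqLim( pc, 1000 );CHKERRQ(ierr);

   or, equivalently, -pc_gamg_process_eq_limit 1000 on the command line.
*/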
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetCoarseEqLim"
/*@
   PCGAMGSetCoarseEqLim - Set max number of equations on coarse grids.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - the maximum number of equations on the coarsest grid

   Options Database Key:
.  -pc_gamg_coarse_eq_limit

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetCoarseEqLim(PC pc, PetscInt n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetCoarseEqLim_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetCoarseEqLim_GAMG"
PetscErrorCode PCGAMGSetCoarseEqLim_GAMG(PC pc, PetscInt n)
{
  PC_MG   *mg = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  if(n>0) pc_gamg->coarse_eq_limit = n;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetRepartitioning"
/*@
   PCGAMGSetRepartitioning - Repartition the coarse grids

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - PETSC_TRUE to repartition the coarse grids

   Options Database Key:
.  -pc_gamg_repartition

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetRepartitioning(PC pc, PetscBool n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetRepartitioning_C",(PC,PetscBool),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetRepartitioning_GAMG"
PetscErrorCode PCGAMGSetRepartitioning_GAMG(PC pc, PetscBool n)
{
  PC_MG   *mg = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->repart = n;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetNlevels"
/*@
   PCGAMGSetNlevels - Set the maximum number of multigrid levels.

   Not collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - the maximum number of levels

   Options Database Key:
.  -pc_mg_levels

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetNlevels(PC pc, PetscInt n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetNlevels_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetNlevels_GAMG"
PetscErrorCode PCGAMGSetNlevels_GAMG(PC pc, PetscInt n)
{
  PC_MG   *mg = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->Nlevels = n;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetThreshold"
/*@
   PCGAMGSetThreshold - Relative threshold to use for dropping edges in aggregation graph

   Not collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - the threshold value

   Options Database Key:
.  -pc_gamg_threshold

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetThreshold(PC pc, PetscReal n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetThreshold_C",(PC,PetscReal),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetThreshold_GAMG"
PetscErrorCode PCGAMGSetThreshold_GAMG(PC pc, PetscReal n)
{
  PC_MG   *mg = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->threshold = n;
  PetscFunctionReturn(0);
}
EXTERN_C_END
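/*
   Usage sketch (illustrative, assuming 'pc' is a PC of type PCGAMG):

     ierr = PCGAMGSetThreshold( pc, 0.08 );CHKERRQ(ierr);

   or -pc_gamg_threshold 0.08; the default set in PCCreate_GAMG() below is 0.05.
*/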
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetType"
/*@
   PCGAMGSetType - Set solution method - calls sub create method

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  type - the GAMG type

   Options Database Key:
.  -pc_gamg_type

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetType( PC pc, const PCGAMGType type )
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetType_C",(PC,const PCGAMGType),(pc,type));
  CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetType_GAMG"
PetscErrorCode PCGAMGSetType_GAMG( PC pc, const PCGAMGType type )
{
  PetscErrorCode ierr,(*r)(PC);

  PetscFunctionBegin;
  ierr = PetscFListFind(GAMGList,((PetscObject)pc)->comm,type,PETSC_FALSE,(PetscVoidStarFunction)&r);
  CHKERRQ(ierr);

  if (!r) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_UNKNOWN_TYPE,"Unknown GAMG type %s given",type);

  /* call sub create method */
  ierr = (*r)(pc); CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
EXTERN_C_END
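/*
   Usage sketch (illustrative, assuming 'pc' is a PC of type PCGAMG; GAMGAGG
   and GAMGGEO are the type names registered in PCCreate_GAMG() below):

     ierr = PCGAMGSetType( pc, GAMGAGG );CHKERRQ(ierr);
*/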
#undef __FUNCT__
#define __FUNCT__ "PCSetFromOptions_GAMG"
PetscErrorCode PCSetFromOptions_GAMG( PC pc )
{
  PetscErrorCode ierr;
  PC_MG          *mg = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
  PetscBool      flag;
  MPI_Comm       wcomm = ((PetscObject)pc)->comm;

  PetscFunctionBegin;
  ierr = PetscOptionsHead("GAMG options"); CHKERRQ(ierr);
  {
    /* -pc_gamg_verbose */
    ierr = PetscOptionsInt("-pc_gamg_verbose","Verbose (debugging) output for PCGAMG",
                           "none", pc_gamg->verbose,
                           &pc_gamg->verbose, PETSC_NULL );
    CHKERRQ(ierr);

    /* -pc_gamg_repartition */
    ierr = PetscOptionsBool("-pc_gamg_repartition",
                            "Repartition coarse grids (false)",
                            "PCGAMGSetRepartitioning",
                            pc_gamg->repart,
                            &pc_gamg->repart,
                            &flag);
    CHKERRQ(ierr);

    /* -pc_gamg_process_eq_limit */
    ierr = PetscOptionsInt("-pc_gamg_process_eq_limit",
                           "Limit (goal) on number of equations per process on coarse grids",
                           "PCGAMGSetProcEqLim",
                           pc_gamg->min_eq_proc,
                           &pc_gamg->min_eq_proc,
                           &flag );
    CHKERRQ(ierr);

    /* -pc_gamg_coarse_eq_limit */
    ierr = PetscOptionsInt("-pc_gamg_coarse_eq_limit",
                           "Limit on number of equations for the coarse grid",
                           "PCGAMGSetCoarseEqLim",
                           pc_gamg->coarse_eq_limit,
                           &pc_gamg->coarse_eq_limit,
                           &flag );
    CHKERRQ(ierr);

    /* -pc_gamg_threshold */
    ierr = PetscOptionsReal("-pc_gamg_threshold",
                            "Relative threshold to use for dropping edges in aggregation graph",
                            "PCGAMGSetThreshold",
                            pc_gamg->threshold,
                            &pc_gamg->threshold,
                            &flag );
    CHKERRQ(ierr);
    if(flag && pc_gamg->verbose) PetscPrintf(wcomm,"\t[%d]%s threshold set %e\n",0,__FUNCT__,pc_gamg->threshold);

    /* -pc_mg_levels */
    ierr = PetscOptionsInt("-pc_mg_levels",
                           "Set number of MG levels",
                           "PCGAMGSetNlevels",
                           pc_gamg->Nlevels,
                           &pc_gamg->Nlevels,
                           &flag );
    CHKERRQ(ierr);
  }
  ierr = PetscOptionsTail();CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
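/*
   Example command line exercising the options above (illustrative; the
   equation limits and threshold shown are the defaults from PCCreate_GAMG()):

     -pc_type gamg -pc_gamg_verbose 1 -pc_gamg_repartition true
     -pc_gamg_process_eq_limit 100 -pc_gamg_coarse_eq_limit 800
     -pc_gamg_threshold 0.05
*/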
/* -------------------------------------------------------------------------- */
/*MC
     PCCreate_GAMG - Geometric algebraic multigrid (AMG) preconditioning framework.
       - This is the entry point to GAMG, registered in pcregis.c

   Options Database Keys:
   Multigrid options(inherited)
+  -pc_mg_cycles <1>: 1 for V cycle, 2 for W-cycle (PCMGSetCycleType)
.  -pc_mg_smoothup <1>: Number of post-smoothing steps (PCMGSetNumberSmoothUp)
.  -pc_mg_smoothdown <1>: Number of pre-smoothing steps (PCMGSetNumberSmoothDown)
-  -pc_mg_type <multiplicative>: (one of) additive multiplicative full cascade kascade

   Level: intermediate

   Concepts: multigrid

.seealso: PCCreate(), PCSetType(), PCType (for list of available types), PC, PCMGType,
          PCMGSetLevels(), PCMGGetLevels(), PCMGSetType(), PCMGSetCycleType(), PCMGSetNumberSmoothDown(),
          PCMGSetNumberSmoothUp(), PCMGGetCoarseSolve(), PCMGSetResidual(), PCMGSetInterpolation(),
          PCMGSetRestriction(), PCMGGetSmoother(), PCMGGetSmootherUp(), PCMGGetSmootherDown(),
          PCMGSetCyclesOnLevel(), PCMGSetRhs(), PCMGSetX(), PCMGSetR()
M*/
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCCreate_GAMG"
PetscErrorCode PCCreate_GAMG( PC pc )
{
  PetscErrorCode ierr;
  PC_GAMG        *pc_gamg;
  PC_MG          *mg;
  PetscClassId   cookie;

#if defined PETSC_USE_LOG
  static long count = 0;
#endif

  PetscFunctionBegin;
  /* PCGAMG is an inherited class of PCMG. Initialize pc as PCMG */
  ierr = PCSetType( pc, PCMG ); CHKERRQ(ierr); /* calls PCCreate_MG() and MGCreate_Private() */
  ierr = PetscObjectChangeTypeName( (PetscObject)pc, PCGAMG ); CHKERRQ(ierr);

  /* create a supporting struct and attach it to pc */
  ierr = PetscNewLog( pc, PC_GAMG, &pc_gamg ); CHKERRQ(ierr);
  mg = (PC_MG*)pc->data;
  mg->galerkin = 2;             /* Use Galerkin, but it is computed externally */
  mg->innerctx = pc_gamg;

  pc_gamg->setup_count = 0;
  /* these should be in subctx but repartitioning needs simple arrays */
  pc_gamg->data_sz = 0;
  pc_gamg->data = 0;

  /* register AMG type */
  if( !GAMGList ){
    ierr = PetscFListAdd(&GAMGList,GAMGGEO,"PCCreateGAMG_GEO",(void(*)(void))PCCreateGAMG_GEO);CHKERRQ(ierr);
    ierr = PetscFListAdd(&GAMGList,GAMGAGG,"PCCreateGAMG_AGG",(void(*)(void))PCCreateGAMG_AGG);CHKERRQ(ierr);
  }

  /* overwrite the pointers of PCMG by the functions of base class PCGAMG */
  pc->ops->setfromoptions = PCSetFromOptions_GAMG;
  pc->ops->setup          = PCSetUp_GAMG;
  pc->ops->reset          = PCReset_GAMG;
  pc->ops->destroy        = PCDestroy_GAMG;

  ierr = PetscObjectComposeFunctionDynamic( (PetscObject)pc,
                                            "PCGAMGSetProcEqLim_C",
                                            "PCGAMGSetProcEqLim_GAMG",
                                            PCGAMGSetProcEqLim_GAMG);
  CHKERRQ(ierr);

  ierr = PetscObjectComposeFunctionDynamic( (PetscObject)pc,
                                            "PCGAMGSetCoarseEqLim_C",
                                            "PCGAMGSetCoarseEqLim_GAMG",
                                            PCGAMGSetCoarseEqLim_GAMG);
  CHKERRQ(ierr);

  ierr = PetscObjectComposeFunctionDynamic( (PetscObject)pc,
                                            "PCGAMGSetRepartitioning_C",
                                            "PCGAMGSetRepartitioning_GAMG",
                                            PCGAMGSetRepartitioning_GAMG);
  CHKERRQ(ierr);

  /* compose the Nlevels setter so PCGAMGSetNlevels() can find it */
  ierr = PetscObjectComposeFunctionDynamic( (PetscObject)pc,
                                            "PCGAMGSetNlevels_C",
                                            "PCGAMGSetNlevels_GAMG",
                                            PCGAMGSetNlevels_GAMG);
  CHKERRQ(ierr);

  ierr = PetscObjectComposeFunctionDynamic( (PetscObject)pc,
                                            "PCGAMGSetThreshold_C",
                                            "PCGAMGSetThreshold_GAMG",
                                            PCGAMGSetThreshold_GAMG);
  CHKERRQ(ierr);

  ierr = PetscObjectComposeFunctionDynamic( (PetscObject)pc,
                                            "PCGAMGSetType_C",
                                            "PCGAMGSetType_GAMG",
                                            PCGAMGSetType_GAMG);
  CHKERRQ(ierr);

  pc_gamg->repart = PETSC_FALSE;
  pc_gamg->min_eq_proc = 100;
  pc_gamg->coarse_eq_limit = 800;
  pc_gamg->threshold = 0.05;
  pc_gamg->Nlevels = GAMG_MAXLEVELS;
  pc_gamg->verbose = 0;
  pc_gamg->emax_id = -1;
  pc_gamg->col_bs_id = -1;

  /* instantiate derived type */
  ierr = PetscOptionsHead("GAMG options"); CHKERRQ(ierr);
  {
    char tname[256] = GAMGAGG;
    ierr = PetscOptionsList("-pc_gamg_type","Type of GAMG method","PCGAMGSetType",
                            GAMGList, tname, tname, sizeof(tname), PETSC_NULL );
    CHKERRQ(ierr);
    ierr = PCGAMGSetType( pc, tname ); CHKERRQ(ierr);
  }
  ierr = PetscOptionsTail();CHKERRQ(ierr);
#if defined PETSC_USE_LOG
  if( count++ == 0 ) {
    PetscClassIdRegister("GAMG Setup",&cookie);
    PetscLogEventRegister("GAMG: createProl", cookie, &gamg_setup_events[SET1]);
    PetscLogEventRegister(" Graph", cookie, &gamg_setup_events[GRAPH]);
    /* PetscLogEventRegister(" G.Mat", cookie, &gamg_setup_events[GRAPH_MAT]); */
    /* PetscLogEventRegister(" G.Filter", cookie, &gamg_setup_events[GRAPH_FILTER]); */
    /* PetscLogEventRegister(" G.Square", cookie, &gamg_setup_events[GRAPH_SQR]); */
    PetscLogEventRegister(" MIS/Agg", cookie, &gamg_setup_events[SET4]);
    PetscLogEventRegister(" geo: growSupp", cookie, &gamg_setup_events[SET5]);
    PetscLogEventRegister(" geo: triangle", cookie, &gamg_setup_events[SET6]);
    PetscLogEventRegister(" search&set", cookie, &gamg_setup_events[FIND_V]);
    PetscLogEventRegister(" SA: collect data", cookie, &gamg_setup_events[SET7]);
    PetscLogEventRegister(" SA: frmProl0", cookie, &gamg_setup_events[SET8]);
    PetscLogEventRegister(" SA: smooth", cookie, &gamg_setup_events[SET9]);
    PetscLogEventRegister("GAMG: partLevel", cookie, &gamg_setup_events[SET2]);
    PetscLogEventRegister(" repartition", cookie, &gamg_setup_events[SET12]);
    PetscLogEventRegister(" Invert-Sort", cookie, &gamg_setup_events[SET13]);
    PetscLogEventRegister(" Move A", cookie, &gamg_setup_events[SET14]);
    PetscLogEventRegister(" Move P", cookie, &gamg_setup_events[SET15]);

    /* PetscLogEventRegister(" PL move data", cookie, &gamg_setup_events[SET13]); */
    /* PetscLogEventRegister("GAMG: fix", cookie, &gamg_setup_events[SET10]); */
    /* PetscLogEventRegister("GAMG: set levels", cookie, &gamg_setup_events[SET11]); */
    /* create timer stages */
#if defined GAMG_STAGES
    {
      char str[32];
      PetscInt lidx;  /* declared before statements (C89) */
      sprintf(str,"MG Level %d (finest)",0);
      PetscLogStageRegister(str, &gamg_stages[0]);
      for (lidx=1;lidx<9;lidx++){
        sprintf(str,"MG Level %d",lidx);
        PetscLogStageRegister(str, &gamg_stages[lidx]);
      }
    }
#endif
  }
#endif
  PetscFunctionReturn(0);
}
EXTERN_C_END