/*
 GAMG geometric-algebraic multigrid PC - Mark Adams 2011
*/
#include "petsc-private/matimpl.h"
#include <../src/ksp/pc/impls/gamg/gamg.h>           /*I "petscpc.h" I*/
#include <petsc-private/kspimpl.h>

#if defined PETSC_GAMG_USE_LOG
PetscLogEvent petsc_gamg_setup_events[NUM_SET];
#endif

#if defined PETSC_USE_LOG
PetscLogEvent PC_GAMGGgraph_AGG;
PetscLogEvent PC_GAMGGgraph_GEO;
PetscLogEvent PC_GAMGCoarsen_AGG;
PetscLogEvent PC_GAMGCoarsen_GEO;
PetscLogEvent PC_GAMGProlongator_AGG;
PetscLogEvent PC_GAMGProlongator_GEO;
PetscLogEvent PC_GAMGOptprol_AGG;
PetscLogEvent PC_GAMGKKTProl_AGG;
#endif

#define GAMG_MAXLEVELS 30

/* #define GAMG_STAGES */
#if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES)
static PetscLogStage gamg_stages[GAMG_MAXLEVELS];
#endif

static PetscFList GAMGList = 0;

/* ----------------------------------------------------------------------------- */
#undef __FUNCT__
#define __FUNCT__ "PCReset_GAMG"
PetscErrorCode PCReset_GAMG(PC pc)
{
  PetscErrorCode ierr;
  PC_MG          *mg = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  if( pc_gamg->data ) { /* this should not happen, cleaned up in SetUp */
    ierr = PetscFree( pc_gamg->data ); CHKERRQ(ierr);
  }
  pc_gamg->data = PETSC_NULL; pc_gamg->data_sz = 0;
  PetscFunctionReturn(0);
}

/* private 2x2 Mat Nest for Stokes */
typedef struct{
  Mat A11,A21,A12,Amat;
  IS  prim_is,constr_is;
}GAMGKKTMat;

#undef __FUNCT__
#define __FUNCT__ "GAMGKKTMatCreate"
static PetscErrorCode GAMGKKTMatCreate( Mat A, PetscBool iskkt, GAMGKKTMat *out )
{
  PetscFunctionBegin;
  out->Amat = A;
  if( iskkt ){
    PetscErrorCode ierr;
    IS             is_constraint, is_prime;
    PetscInt       nmin,nmax;

    ierr = MatGetOwnershipRange( A, &nmin, &nmax ); CHKERRQ(ierr);
    /* constraint (e.g. pressure) equations are identified by zero diagonals */
    ierr = MatFindZeroDiagonals( A, &is_constraint ); CHKERRQ(ierr);
    ierr = ISComplement( is_constraint, nmin, nmax, &is_prime ); CHKERRQ(ierr);
    out->prim_is = is_prime;
    out->constr_is = is_constraint;

    ierr = MatGetSubMatrix( A, is_prime, is_prime, MAT_INITIAL_MATRIX, &out->A11); CHKERRQ(ierr);
    ierr = MatGetSubMatrix( A, is_prime, is_constraint, MAT_INITIAL_MATRIX, &out->A12); CHKERRQ(ierr);
    ierr = MatGetSubMatrix( A, is_constraint, is_prime, MAT_INITIAL_MATRIX, &out->A21); CHKERRQ(ierr);
    PetscPrintf(((PetscObject)A)->comm,"[%d]%s N=%d N_11=%d\n",0,__FUNCT__,A->rmap->N,out->A11->rmap->N);
  }
  else {
    out->A11 = A;
    out->A21 = PETSC_NULL;
    out->A12 = PETSC_NULL;
    out->prim_is = PETSC_NULL;
    out->constr_is = PETSC_NULL;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "GAMGKKTMatDestroy"
static PetscErrorCode GAMGKKTMatDestroy( GAMGKKTMat *mat )
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if( mat->A11 && mat->A11 != mat->Amat ) {
    ierr = MatDestroy( &mat->A11 ); CHKERRQ(ierr);
  }
  ierr = MatDestroy( &mat->A21 ); CHKERRQ(ierr);
  ierr = MatDestroy( &mat->A12 ); CHKERRQ(ierr);

  ierr = ISDestroy( &mat->prim_is ); CHKERRQ(ierr);
  ierr = ISDestroy( &mat->constr_is ); CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
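/*
   A minimal sketch (not called anywhere in this file) of how the KKT helpers
   above are meant to be paired; 'Amat' here is a hypothetical assembled
   saddle-point matrix supplied by the caller:

     GAMGKKTMat kkt;
     ierr = GAMGKKTMatCreate( Amat, PETSC_TRUE, &kkt ); CHKERRQ(ierr);
     ...  use kkt.A11 (primal block), kkt.A12/kkt.A21 (coupling blocks) ...
     ierr = GAMGKKTMatDestroy( &kkt ); CHKERRQ(ierr);

   Note GAMGKKTMatDestroy() does not destroy 'Amat' itself, only the extracted
   submatrices and index sets.
*/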
/* -------------------------------------------------------------------------- */
/*
   createLevel: create coarse op with RAP.  repartition and/or reduce number
     of active processors.

   Input Parameters:
   . pc - parameters + side effect: coarse data in 'pc_gamg->data' and
          'pc_gamg->data_sz' are changed via repartitioning/reduction.
   . Amat_fine - matrix on this fine (k) level
   . cbs - coarse block size
   . isLast - this is the coarsest level, so reduce to one active process
   . stokes - Stokes (KKT) system, so the primal part is moved with its own IS
   In/Output Parameters:
   . a_P_inout - prolongation operator to the next level (k-->k-1)
   . a_nactive_proc - number of active procs
   Output Parameter:
   . a_Amat_crs - coarse matrix that is created (k-1)
*/

#undef __FUNCT__
#define __FUNCT__ "createLevel"
static PetscErrorCode createLevel( const PC pc,
                                   const Mat Amat_fine,
                                   const PetscInt cbs,
                                   const PetscBool isLast,
                                   const PetscBool stokes,
                                   Mat *a_P_inout,
                                   Mat *a_Amat_crs,
                                   PetscMPIInt *a_nactive_proc
                                   )
{
  PetscErrorCode  ierr;
  PC_MG           *mg = (PC_MG*)pc->data;
  PC_GAMG         *pc_gamg = (PC_GAMG*)mg->innerctx;
  const PetscBool repart = pc_gamg->repart;
  const PetscInt  min_eq_proc = pc_gamg->min_eq_proc, coarse_max = pc_gamg->coarse_eq_limit;
  Mat             Cmat,Pold=*a_P_inout;
  MPI_Comm        wcomm = ((PetscObject)Amat_fine)->comm;
  PetscMPIInt     mype,npe,new_npe,nactive=*a_nactive_proc;
  PetscInt        ncrs_eq,ncrs_prim;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank( wcomm, &mype ); CHKERRQ(ierr);
  ierr = MPI_Comm_size( wcomm, &npe );  CHKERRQ(ierr);
  /* RAP: Galerkin coarse operator Cmat = Pold' * Amat_fine * Pold */
  ierr = MatPtAP( Amat_fine, Pold, MAT_INITIAL_MATRIX, 2.0, &Cmat ); CHKERRQ(ierr);

  /* set 'ncrs_prim' (nodes), 'ncrs_eq' (equations) */
  ncrs_prim = pc_gamg->data_sz/pc_gamg->data_cell_cols/pc_gamg->data_cell_rows;
  assert(pc_gamg->data_sz%(pc_gamg->data_cell_cols*pc_gamg->data_cell_rows)==0);
  ierr = MatGetLocalSize( Cmat, &ncrs_eq, PETSC_NULL ); CHKERRQ(ierr);
  /* get number of PEs to make active 'new_npe', reduce, can be any integer 1-P */
  {
    PetscInt ncrs_eq_glob,ncrs_eq_ave;
    ierr = MatGetSize( Cmat, &ncrs_eq_glob, PETSC_NULL ); CHKERRQ(ierr);
    ncrs_eq_ave = ncrs_eq_glob/npe;
    new_npe = (PetscMPIInt)((float)ncrs_eq_ave/(float)min_eq_proc + 0.5); /* hardwire min. number of eq/proc */
    if( new_npe == 0 || ncrs_eq_ave < coarse_max ) new_npe = 1;
    else if ( new_npe >= nactive ) new_npe = nactive; /* no change, rare */
    if( isLast ) new_npe = 1;
  }

  if( !repart && new_npe==nactive ) {
    *a_Amat_crs = Cmat; /* output - no repartitioning or reduction - could bail here */
  }
  else {
    const PetscInt *idx,ndata_rows=pc_gamg->data_cell_rows,ndata_cols=pc_gamg->data_cell_cols,node_data_sz=ndata_rows*ndata_cols;
    PetscInt       *counts,*newproc_idx,ii,jj,kk,strideNew,*tidx,ncrs_prim_new,ncrs_eq_new,nloc_old;
    IS             is_eq_newproc,is_eq_newproc_prim,is_eq_num,is_eq_num_prim,isscat,new_eq_indices;
    VecScatter     vecscat;
    PetscScalar    *array;
    Vec            src_crd, dest_crd;

    nloc_old = ncrs_eq/cbs; assert(ncrs_eq%cbs==0);
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET12],0,0,0,0);CHKERRQ(ierr);
#endif
    /* make 'is_eq_newproc' */
    ierr = PetscMalloc( npe*sizeof(PetscInt), &counts ); CHKERRQ(ierr);
    if( repart && !stokes ) {
      /* Repartition Cmat_{k} and move columns of P^{k}_{k-1} and coordinates of primal part accordingly */
      Mat adj;

      if (pc_gamg->verbose>0) {
        if (pc_gamg->verbose==1) PetscPrintf(wcomm,"\t[%d]%s repartition: npe (active): %d --> %d, neq = %d\n",mype,__FUNCT__,*a_nactive_proc,new_npe,ncrs_eq);
        else {
          PetscInt n;
          ierr = MPI_Allreduce( &ncrs_eq, &n, 1, MPIU_INT, MPI_SUM, wcomm );CHKERRQ(ierr);
          PetscPrintf(wcomm,"\t[%d]%s repartition: npe (active): %d --> %d, neq = %d\n",mype,__FUNCT__,*a_nactive_proc,new_npe,n);
        }
      }

      /* get 'adj' */
      if( cbs == 1 ) {
        ierr = MatConvert( Cmat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj ); CHKERRQ(ierr);
      }
      else{
        /* make a scalar matrix to partition (no Stokes here) */
        Mat               tMat;
        PetscInt          Istart_crs,Iend_crs,ncols,jj,Ii;
        const PetscScalar *vals;
        const PetscInt    *idx;
        PetscInt          *d_nnz, *o_nnz, M, N;
        static PetscInt   llev = 0;

        ierr = PetscMalloc( ncrs_prim*sizeof(PetscInt), &d_nnz ); CHKERRQ(ierr);
        ierr = PetscMalloc( ncrs_prim*sizeof(PetscInt), &o_nnz ); CHKERRQ(ierr);
        ierr = MatGetOwnershipRange( Cmat, &Istart_crs, &Iend_crs ); CHKERRQ(ierr);
        ierr = MatGetSize( Cmat, &M, &N );CHKERRQ(ierr);
        for ( Ii = Istart_crs, jj = 0 ; Ii < Iend_crs ; Ii += cbs, jj++ ) {
          ierr = MatGetRow(Cmat,Ii,&ncols,0,0); CHKERRQ(ierr);
          d_nnz[jj] = ncols/cbs;
          o_nnz[jj] = ncols/cbs;
          ierr = MatRestoreRow(Cmat,Ii,&ncols,0,0); CHKERRQ(ierr);
          if( d_nnz[jj] > ncrs_prim ) d_nnz[jj] = ncrs_prim;
          if( o_nnz[jj] > (M/cbs-ncrs_prim) ) o_nnz[jj] = M/cbs-ncrs_prim;
        }

        ierr = MatCreate( wcomm, &tMat ); CHKERRQ(ierr);
        ierr = MatSetSizes( tMat, ncrs_prim, ncrs_prim, PETSC_DETERMINE, PETSC_DETERMINE );CHKERRQ(ierr);
        ierr = MatSetType(tMat,MATAIJ); CHKERRQ(ierr);
        ierr = MatSeqAIJSetPreallocation(tMat,0,d_nnz);CHKERRQ(ierr);
        ierr = MatMPIAIJSetPreallocation(tMat,0,d_nnz,0,o_nnz);CHKERRQ(ierr);
        ierr = PetscFree( d_nnz ); CHKERRQ(ierr);
        ierr = PetscFree( o_nnz ); CHKERRQ(ierr);

        for ( ii = Istart_crs; ii < Iend_crs; ii++ ) {
          PetscInt dest_row = ii/cbs;
          ierr = MatGetRow(Cmat,ii,&ncols,&idx,&vals); CHKERRQ(ierr);
          for( jj = 0 ; jj < ncols ; jj++ ){
            PetscInt dest_col = idx[jj]/cbs;
            PetscScalar v = 1.0;
            ierr = MatSetValues(tMat,1,&dest_row,1,&dest_col,&v,ADD_VALUES); CHKERRQ(ierr);
          }
          ierr = MatRestoreRow(Cmat,ii,&ncols,&idx,&vals); CHKERRQ(ierr);
        }
        ierr = MatAssemblyBegin(tMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
        ierr = MatAssemblyEnd(tMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
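        /*
           At this point tMat is the "collapsed" scalar connectivity graph:
           each cbs x cbs block of Cmat contributes a single entry; e.g. with
           cbs=2, rows 2j and 2j+1 both map to scalar row j (dest_row = ii/cbs
           above).  Partitioning tMat instead of Cmat keeps all equations of a
           node on one process.
        */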
        if( llev++ == -1 ) {
          PetscViewer viewer; char fname[32];
          ierr = PetscSNPrintf(fname,sizeof fname,"part_mat_%D.mat",llev);CHKERRQ(ierr);
          ierr = PetscViewerBinaryOpen(wcomm,fname,FILE_MODE_WRITE,&viewer);CHKERRQ(ierr);
          ierr = MatView( tMat, viewer ); CHKERRQ(ierr);
          ierr = PetscViewerDestroy( &viewer );CHKERRQ(ierr);
        }

        ierr = MatConvert( tMat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj ); CHKERRQ(ierr);

        ierr = MatDestroy( &tMat ); CHKERRQ(ierr);
      } /* create 'adj' */

      { /* partition: get newproc_idx */
        char            prefix[256];
        const char      *pcpre;
        const PetscInt  *is_idx;
        MatPartitioning mpart;
        IS              proc_is;
        PetscInt        targetPE;

        ierr = MatPartitioningCreate( wcomm, &mpart ); CHKERRQ(ierr);
        ierr = MatPartitioningSetAdjacency( mpart, adj ); CHKERRQ(ierr);
        ierr = PCGetOptionsPrefix( pc, &pcpre );CHKERRQ(ierr);
        ierr = PetscSNPrintf(prefix,sizeof prefix,"%spc_gamg_",pcpre?pcpre:"");CHKERRQ(ierr);
        ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
        ierr = MatPartitioningSetFromOptions( mpart ); CHKERRQ(ierr);
        ierr = MatPartitioningSetNParts( mpart, new_npe );CHKERRQ(ierr);
        ierr = MatPartitioningApply( mpart, &proc_is ); CHKERRQ(ierr);
        ierr = MatPartitioningDestroy( &mpart ); CHKERRQ(ierr);

        /* collect IS info */
        ierr = PetscMalloc( ncrs_eq*sizeof(PetscInt), &newproc_idx ); CHKERRQ(ierr);
        ierr = ISGetIndices( proc_is, &is_idx ); CHKERRQ(ierr);
        targetPE = 1; /* bring to "front" of machine */
        /*targetPE = npe/new_npe;*/ /* spread partitioning across machine */
        for( kk = jj = 0 ; kk < nloc_old ; kk++ ){
          for( ii = 0 ; ii < cbs ; ii++, jj++ ){
            newproc_idx[jj] = is_idx[kk] * targetPE; /* distribution */
          }
        }
        ierr = ISRestoreIndices( proc_is, &is_idx ); CHKERRQ(ierr);
        ierr = ISDestroy( &proc_is ); CHKERRQ(ierr);
      }
      ierr = MatDestroy( &adj ); CHKERRQ(ierr);

      ierr = ISCreateGeneral( wcomm, ncrs_eq, newproc_idx, PETSC_COPY_VALUES, &is_eq_newproc );CHKERRQ(ierr);
      if( newproc_idx != 0 ) {
        ierr = PetscFree( newproc_idx ); CHKERRQ(ierr);
      }
    } /* repartitioning */
    else { /* simple aggregation of parts -- 'is_eq_newproc' */
      PetscInt rfactor,targetPE;
      /* find factor */
      if( new_npe == 1 ) rfactor = npe; /* easy */
      else {
        PetscReal best_fact = 0.;
        jj = -1;
        for( kk = 1 ; kk <= npe ; kk++ ){
          if( npe%kk==0 ) { /* a candidate */
            PetscReal nactpe = (PetscReal)npe/(PetscReal)kk, fact = nactpe/(PetscReal)new_npe;
            if(fact > 1.0) fact = 1./fact; /* keep fact < 1 */
            if( fact > best_fact ) {
              best_fact = fact; jj = kk;
            }
          }
        }
        if( jj != -1 ) rfactor = jj;
        else rfactor = 1; /* does this ever happen? (kk=1 is always a candidate, even for prime npe) */
      }
      new_npe = npe/rfactor;

      if( new_npe==nactive ) {
        *a_Amat_crs = Cmat; /* output - no repartitioning or reduction; must return early since we are nested in the reduction branch */
        ierr = PetscFree( counts ); CHKERRQ(ierr);
        if (pc_gamg->verbose>0){
          PetscPrintf(wcomm,"\t[%d]%s aggregate processors noop: new_npe=%d, neq(loc)=%d\n",mype,__FUNCT__,new_npe,ncrs_eq);
        }
        PetscFunctionReturn(0);
      }

      if (pc_gamg->verbose) PetscPrintf(wcomm,"\t[%d]%s number of equations (loc) %d with simple aggregation\n",mype,__FUNCT__,ncrs_eq);
      targetPE = mype/rfactor;
      ierr = ISCreateStride( wcomm, ncrs_eq, targetPE, 0, &is_eq_newproc ); CHKERRQ(ierr);

      if( stokes ) {
        ierr = ISCreateStride( wcomm, ncrs_prim*cbs, targetPE, 0, &is_eq_newproc_prim ); CHKERRQ(ierr);
      }
    } /* end simple 'is_eq_newproc' */

    /*
      Create an index set from the is_eq_newproc index set to indicate the mapping TO
    */
    ierr = ISPartitioningToNumbering( is_eq_newproc, &is_eq_num ); CHKERRQ(ierr);
    if( stokes ) {
      ierr = ISPartitioningToNumbering( is_eq_newproc_prim, &is_eq_num_prim ); CHKERRQ(ierr);
    }
    else is_eq_num_prim = is_eq_num;
    /*
      Determine how many equations/vertices are assigned to each processor
    */
    ierr = ISPartitioningCount( is_eq_newproc, npe, counts ); CHKERRQ(ierr);
    ncrs_eq_new = counts[mype];
    ierr = ISDestroy( &is_eq_newproc ); CHKERRQ(ierr);
    if( stokes ) {
      ierr = ISPartitioningCount( is_eq_newproc_prim, npe, counts ); CHKERRQ(ierr);
      ierr = ISDestroy( &is_eq_newproc_prim ); CHKERRQ(ierr);
      ncrs_prim_new = counts[mype]/cbs; /* nodes */
    }
    else ncrs_prim_new = ncrs_eq_new/cbs; /* eqs */

    ierr = PetscFree( counts ); CHKERRQ(ierr);
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET12],0,0,0,0); CHKERRQ(ierr);
#endif

    /* move data (for primal equations only) */
    /* Create a vector to contain the newly ordered element information */
    ierr = VecCreate( wcomm, &dest_crd );CHKERRQ(ierr);
    ierr = VecSetSizes( dest_crd, node_data_sz*ncrs_prim_new, PETSC_DECIDE ); CHKERRQ(ierr);
    ierr = VecSetFromOptions( dest_crd ); CHKERRQ(ierr); /* this is needed! */
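    /*
       A worked illustration of the layout handled below (not code): each node
       carries node_data_sz = ndata_rows*ndata_cols values.  In pc_gamg->data
       they are stored column by column (column jj is a contiguous run of
       ncrs_prim*ndata_rows entries, see 'stride0' below); for the scatter they
       are flattened to node-major order (jx = ii*node_data_sz + kk*ndata_cols
       + jj), and the inverse transform is applied after the move.
    */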
    /*
      There are 'ndata_rows*ndata_cols' data items per node (one can think of
      the vectors as having a block size of 'node_data_sz').  Note, ISs are
      expanded into equation space by 'cbs'.
    */
    ierr = PetscMalloc( (ncrs_prim*node_data_sz)*sizeof(PetscInt), &tidx ); CHKERRQ(ierr);
    ierr = ISGetIndices( is_eq_num_prim, &idx ); CHKERRQ(ierr);
    for(ii=0,jj=0; ii<ncrs_prim ; ii++) {
      PetscInt id = idx[ii*cbs]/cbs; /* get node back */
      for( kk=0; kk<node_data_sz ; kk++, jj++) tidx[jj] = id*node_data_sz + kk;
    }
    ierr = ISRestoreIndices( is_eq_num_prim, &idx ); CHKERRQ(ierr);
    ierr = ISCreateGeneral( wcomm, node_data_sz*ncrs_prim, tidx, PETSC_COPY_VALUES, &isscat );CHKERRQ(ierr);
    ierr = PetscFree( tidx ); CHKERRQ(ierr);
    /*
      Create a vector to contain the original vertex information for each element
    */
    ierr = VecCreateSeq( PETSC_COMM_SELF, node_data_sz*ncrs_prim, &src_crd ); CHKERRQ(ierr);
    for( jj=0; jj<ndata_cols ; jj++ ) {
      const PetscInt stride0=ncrs_prim*pc_gamg->data_cell_rows;
      for( ii=0 ; ii<ncrs_prim ; ii++) {
        for( kk=0; kk<ndata_rows ; kk++ ) {
          PetscInt ix = ii*ndata_rows + kk + jj*stride0, jx = ii*node_data_sz + kk*ndata_cols + jj;
          PetscScalar tt = (PetscScalar)pc_gamg->data[ix];
          ierr = VecSetValues( src_crd, 1, &jx, &tt, INSERT_VALUES ); CHKERRQ(ierr);
        }
      }
    }
    ierr = VecAssemblyBegin(src_crd); CHKERRQ(ierr);
    ierr = VecAssemblyEnd(src_crd); CHKERRQ(ierr);
    /*
      Scatter the element vertex information (still in the original vertex ordering)
      to the correct processor
    */
    ierr = VecScatterCreate( src_crd, PETSC_NULL, dest_crd, isscat, &vecscat);CHKERRQ(ierr);
    ierr = ISDestroy( &isscat ); CHKERRQ(ierr);
    ierr = VecScatterBegin(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterDestroy( &vecscat ); CHKERRQ(ierr);
    ierr = VecDestroy( &src_crd ); CHKERRQ(ierr);
    /*
      Put the element vertex data into a new allocation of pc_gamg->data
    */
    ierr = PetscFree( pc_gamg->data ); CHKERRQ(ierr);
    ierr = PetscMalloc( node_data_sz*ncrs_prim_new*sizeof(PetscReal), &pc_gamg->data ); CHKERRQ(ierr);
    pc_gamg->data_sz = node_data_sz*ncrs_prim_new;
    strideNew = ncrs_prim_new*ndata_rows;
    ierr = VecGetArray( dest_crd, &array ); CHKERRQ(ierr);
    for( jj=0; jj<ndata_cols ; jj++ ) {
      for( ii=0 ; ii<ncrs_prim_new ; ii++) {
        for( kk=0; kk<ndata_rows ; kk++ ) {
          PetscInt ix = ii*ndata_rows + kk + jj*strideNew, jx = ii*node_data_sz + kk*ndata_cols + jj;
          pc_gamg->data[ix] = PetscRealPart(array[jx]);
        }
      }
    }
    ierr = VecRestoreArray( dest_crd, &array ); CHKERRQ(ierr);
    ierr = VecDestroy( &dest_crd ); CHKERRQ(ierr);

    /* move A and P (columns) with new layout */
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET13],0,0,0,0);CHKERRQ(ierr);
#endif

    /*
      Invert for MatGetSubMatrix
    */
    ierr = ISInvertPermutation( is_eq_num, ncrs_eq_new, &new_eq_indices ); CHKERRQ(ierr);
    ierr = ISSort( new_eq_indices ); CHKERRQ(ierr); /* is this needed? */
    if(is_eq_num != is_eq_num_prim) {
      ierr = ISDestroy( &is_eq_num_prim ); CHKERRQ(ierr); /* could be same as 'is_eq_num' */
    }
    ierr = ISDestroy( &is_eq_num ); CHKERRQ(ierr);
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET13],0,0,0,0);CHKERRQ(ierr);
    ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET14],0,0,0,0);CHKERRQ(ierr);
#endif
    /* 'a_Amat_crs' output */
    {
      Mat mat;
      ierr = MatGetSubMatrix( Cmat, new_eq_indices, new_eq_indices, MAT_INITIAL_MATRIX, &mat );CHKERRQ(ierr);
      *a_Amat_crs = mat;
    }
    ierr = MatDestroy( &Cmat ); CHKERRQ(ierr);

#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET14],0,0,0,0);CHKERRQ(ierr);
#endif
    /* prolongator */
    {
      IS       findices;
      PetscInt Istart,Iend;
      Mat      Pnew;
      ierr = MatGetOwnershipRange( Pold, &Istart, &Iend ); CHKERRQ(ierr);
#if defined PETSC_GAMG_USE_LOG
      ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET15],0,0,0,0);CHKERRQ(ierr);
#endif
      ierr = ISCreateStride(wcomm,Iend-Istart,Istart,1,&findices); CHKERRQ(ierr);
      ierr = MatGetSubMatrix( Pold, findices, new_eq_indices, MAT_INITIAL_MATRIX, &Pnew );CHKERRQ(ierr);
      ierr = ISDestroy( &findices ); CHKERRQ(ierr);
#if defined PETSC_GAMG_USE_LOG
      ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET15],0,0,0,0);CHKERRQ(ierr);
#endif
      ierr = MatDestroy( a_P_inout ); CHKERRQ(ierr);

      /* output - repartitioned */
      *a_P_inout = Pnew;
    }
    ierr = ISDestroy( &new_eq_indices ); CHKERRQ(ierr);

    *a_nactive_proc = new_npe; /* output */
  }

  /* output matrix data for debugging (disabled) */
  if( !PETSC_TRUE ) {
    PetscViewer viewer; char fname[32]; static int llev=0; Cmat = *a_Amat_crs;
    if(llev==0) {
      sprintf(fname,"Cmat_%d.m",llev++);
      ierr = PetscViewerASCIIOpen(wcomm,fname,&viewer);CHKERRQ(ierr);
      ierr = PetscViewerSetFormat( viewer, PETSC_VIEWER_ASCII_MATLAB); CHKERRQ(ierr);
      ierr = MatView(Amat_fine, viewer ); CHKERRQ(ierr);
      ierr = PetscViewerDestroy( &viewer );CHKERRQ(ierr);
    }
    sprintf(fname,"Cmat_%d.m",llev++);
    ierr = PetscViewerASCIIOpen(wcomm,fname,&viewer);CHKERRQ(ierr);
    ierr = PetscViewerSetFormat( viewer, PETSC_VIEWER_ASCII_MATLAB); CHKERRQ(ierr);
    ierr = MatView(Cmat, viewer ); CHKERRQ(ierr);
    ierr = PetscViewerDestroy( &viewer );CHKERRQ(ierr);
  }

  PetscFunctionReturn(0);
}
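/*
   A minimal sketch of what one call to createLevel() accomplishes
   ('A_f', 'P', 'A_c', 'nactive' are placeholder names, not code from this
   file):

     Mat A_c;
     ierr = createLevel( pc, A_f, cbs, PETSC_FALSE, PETSC_FALSE, &P, &A_c, &nactive );CHKERRQ(ierr);
     // A_c ~ P' * A_f * P, with the rows of A_c and the columns of P permuted
     // to the new process layout; 'nactive' and pc_gamg->data updated to match.
*/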
/* -------------------------------------------------------------------------- */
/*
   PCSetUp_GAMG - Prepares for the use of the GAMG preconditioner
                  by setting data structures and options.

   Input Parameter:
.  pc - the preconditioner context

   Application Interface Routine: PCSetUp()

   Notes:
   The interface routine PCSetUp() is not usually called directly by
   the user, but instead is called by PCApply() if necessary.
*/
#undef __FUNCT__
#define __FUNCT__ "PCSetUp_GAMG"
PetscErrorCode PCSetUp_GAMG( PC pc )
{
  PetscErrorCode ierr;
  PC_MG          *mg = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
  Mat            Pmat = pc->pmat;
  PetscInt       fine_level,level,level1,bs,M,qq,lidx,nASMBlocksArr[GAMG_MAXLEVELS];
  MPI_Comm       wcomm = ((PetscObject)pc)->comm;
  PetscMPIInt    mype,npe,nactivepe;
  Mat            Aarr[GAMG_MAXLEVELS],Parr[GAMG_MAXLEVELS];
  PetscReal      emaxs[GAMG_MAXLEVELS];
  IS             *ASMLocalIDsArr[GAMG_MAXLEVELS],removedEqs[GAMG_MAXLEVELS];
  PetscInt       level_bs[GAMG_MAXLEVELS];
  GAMGKKTMat     kktMatsArr[GAMG_MAXLEVELS];
  PetscLogDouble nnz0=0.,nnztot=0.;
  MatInfo        info;
  PetscBool      stokes = PETSC_FALSE;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(wcomm,&mype);CHKERRQ(ierr);
  ierr = MPI_Comm_size(wcomm,&npe);CHKERRQ(ierr);
  if (pc_gamg->verbose>2) PetscPrintf(wcomm,"[%d]%s pc_gamg->setup_count=%d pc->setupcalled=%d\n",mype,__FUNCT__,pc_gamg->setup_count,pc->setupcalled);
  if( pc_gamg->setup_count++ > 0 ) {
    PC_MG_Levels **mglevels = mg->levels;
    /* just redo the Galerkin RAP products down the existing hierarchy */
    Mat B,dA,dB;
    assert(pc->setupcalled);

    if( pc_gamg->Nlevels > 1 ) {
      /* currently only handle case where mat and pmat are the same on coarser levels */
      ierr = KSPGetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,&dA,&dB,PETSC_NULL);CHKERRQ(ierr);
      /* (re)set to get dirty flag */
      ierr = KSPSetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,dA,dB,SAME_NONZERO_PATTERN);CHKERRQ(ierr);

      for (level=pc_gamg->Nlevels-2; level>-1; level--) {
        /* the first time through the matrix structure has changed from repartitioning */
        if( pc_gamg->setup_count==2 /*&& (pc_gamg->repart || level==0)*/) {
          ierr = MatPtAP(dB,mglevels[level+1]->interpolate,MAT_INITIAL_MATRIX,1.0,&B);CHKERRQ(ierr);
          ierr = MatDestroy(&mglevels[level]->A);CHKERRQ(ierr);
          mglevels[level]->A = B;
        }
        else {
          ierr = KSPGetOperators(mglevels[level]->smoothd,PETSC_NULL,&B,PETSC_NULL);CHKERRQ(ierr);
          ierr = MatPtAP(dB,mglevels[level+1]->interpolate,MAT_REUSE_MATRIX,1.0,&B);CHKERRQ(ierr);
        }
        ierr = KSPSetOperators(mglevels[level]->smoothd,B,B,SAME_NONZERO_PATTERN); CHKERRQ(ierr);
        dB = B;
      }
    }

    ierr = PCSetUp_MG( pc );CHKERRQ( ierr );

    /* PCSetUp_MG seems to insist on setting this to GMRES */
    ierr = KSPSetType( mglevels[0]->smoothd, KSPPREONLY ); CHKERRQ(ierr);

    PetscFunctionReturn(0);
  }
  assert(pc->setupcalled == 0);
  ierr = PetscOptionsGetBool(((PetscObject)pc)->prefix,"-pc_fieldsplit_detect_saddle_point",&stokes,PETSC_NULL);CHKERRQ(ierr);

  ierr = GAMGKKTMatCreate( Pmat, stokes, &kktMatsArr[0] ); CHKERRQ(ierr);

  if( pc_gamg->data == 0 ) {
    if( !pc_gamg->createdefaultdata ){
      SETERRQ(wcomm,PETSC_ERR_LIB,"'createdefaultdata' not set(?) need to support NULL data");
    }
    if( stokes ) {
      SETERRQ(wcomm,PETSC_ERR_LIB,"Need data (eg, PCSetCoordinates) for Stokes problems");
    }
    ierr = pc_gamg->createdefaultdata( pc, kktMatsArr[0].A11 ); CHKERRQ(ierr);
  }

  /* get basic dims */
  if( stokes ) {
    bs = pc_gamg->data_cell_rows; /* this is agg-mg specific */
  }
  else {
    ierr = MatGetBlockSize( Pmat, &bs ); CHKERRQ(ierr);
  }

  ierr = MatGetSize( Pmat, &M, &qq );CHKERRQ(ierr);
  if (pc_gamg->verbose) {
    if(pc_gamg->verbose==1) ierr = MatGetInfo(Pmat,MAT_LOCAL,&info);
    else ierr = MatGetInfo(Pmat,MAT_GLOBAL_SUM,&info);
    CHKERRQ(ierr);
    nnz0 = info.nz_used;
    nnztot = info.nz_used;
    PetscPrintf(wcomm,"\t[%d]%s level %d N=%d, n data rows=%d, n data cols=%d, nnz/row (ave)=%d, np=%d\n",
                mype,__FUNCT__,0,M,pc_gamg->data_cell_rows,pc_gamg->data_cell_cols,
                (int)(nnz0/(PetscReal)M),npe);
  }

  /* Get A_i and R_i */
  for ( level=0, Aarr[0]=Pmat, nactivepe = npe; /* hard wired stopping logic */
        level < (pc_gamg->Nlevels-1) && (level==0 || M>pc_gamg->coarse_eq_limit); /* && (npe==1 || nactivepe>1); */
        level++ ){
    level1 = level + 1;
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET1],0,0,0,0); CHKERRQ(ierr);
#if (defined GAMG_STAGES)
    ierr = PetscLogStagePush(gamg_stages[level]); CHKERRQ( ierr );
#endif
#endif
    /* deal with Stokes, get sub matrices */
    if( level > 0 ) {
      ierr = GAMGKKTMatCreate( Aarr[level], stokes, &kktMatsArr[level] ); CHKERRQ(ierr);
    }
    { /* construct prolongator */
      Mat              Gmat;
      PetscCoarsenData *agg_lists;
      Mat              Prol11,Prol22;

      level_bs[level] = bs;
      /* method-specific callbacks: strength-of-connection graph, coarsening, prolongation */
      ierr = pc_gamg->graph( pc,kktMatsArr[level].A11, &Gmat ); CHKERRQ(ierr);
      ierr = pc_gamg->coarsen( pc, &Gmat, &agg_lists ); CHKERRQ(ierr);
      ierr = pc_gamg->prolongator( pc, kktMatsArr[level].A11, Gmat, agg_lists, &Prol11 ); CHKERRQ(ierr);

      /* could have failed to create new level */
      if( Prol11 ){
        /* get new block size of coarse matrices */
        ierr = MatGetBlockSizes( Prol11, PETSC_NULL, &bs ); CHKERRQ(ierr);

        if( stokes ) {
          if(!pc_gamg->formkktprol) SETERRQ(wcomm,PETSC_ERR_USER,"Stokes not supported by AMG method.");
          /* R A12 == (T = A21 P)'; G = T' T; coarsen G; form plain agg with G */
          ierr = pc_gamg->formkktprol( pc, Prol11, kktMatsArr[level].A21, &Prol22 ); CHKERRQ(ierr);
        }

        if( pc_gamg->optprol ){
          /* smooth */
          ierr = pc_gamg->optprol( pc, kktMatsArr[level].A11, &Prol11 ); CHKERRQ(ierr);
        }

        if( stokes ) {
          IS  is_row[2] = {kktMatsArr[level].prim_is,kktMatsArr[level].constr_is};
          Mat a[4] = {Prol11, PETSC_NULL, PETSC_NULL, Prol22 };
          ierr = MatCreateNest(wcomm,2,is_row, 2, is_row, a, &Parr[level1] ); CHKERRQ(ierr);
        }
        else {
          Parr[level1] = Prol11;
        }
      }
      else Parr[level1] = PETSC_NULL;

      if ( pc_gamg->use_aggs_in_gasm ) {
        ierr = PetscCDGetASMBlocks(agg_lists, level_bs[level], &nASMBlocksArr[level], &ASMLocalIDsArr[level] ); CHKERRQ(ierr);
      }

      ierr = PetscCDGetRemovedIS( agg_lists, &removedEqs[level] ); CHKERRQ(ierr);

      ierr = MatDestroy( &Gmat ); CHKERRQ(ierr);
      ierr = PetscCDDestroy( agg_lists ); CHKERRQ(ierr);
    } /* construct prolongator scope */
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET1],0,0,0,0);CHKERRQ(ierr);
#endif
    /* cache eigen estimate (composed onto A11 by the smoothed-aggregation setup, if any) */
    if( pc_gamg->emax_id != -1 ){
      PetscBool flag;
      ierr = PetscObjectComposedDataGetReal( (PetscObject)kktMatsArr[level].A11, pc_gamg->emax_id, emaxs[level], flag );CHKERRQ( ierr );
      if( !flag ) emaxs[level] = -1.;
    }
    else emaxs[level] = -1.;
    if(level==0) Aarr[0] = Pmat; /* use Pmat for finest level setup */
    if( !Parr[level1] ) {
      if (pc_gamg->verbose) PetscPrintf(wcomm,"\t[%d]%s stop gridding, level %d\n",mype,__FUNCT__,level);
      break;
    }
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET2],0,0,0,0);CHKERRQ(ierr);
#endif

    ierr = createLevel( pc, Aarr[level], bs, (PetscBool)(level==pc_gamg->Nlevels-2),
                        stokes, &Parr[level1], &Aarr[level1], &nactivepe );CHKERRQ(ierr);

#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET2],0,0,0,0);CHKERRQ(ierr);
#endif
    ierr = MatGetSize( Aarr[level1], &M, &qq );CHKERRQ(ierr);

    if (pc_gamg->verbose > 0){
      PetscInt NN = M;
      if(pc_gamg->verbose==1) {
        ierr = MatGetInfo(Aarr[level1],MAT_LOCAL,&info); CHKERRQ(ierr);
        ierr = MatGetLocalSize( Aarr[level1], &NN, &qq );
      }
      else ierr = MatGetInfo( Aarr[level1], MAT_GLOBAL_SUM, &info );
      CHKERRQ(ierr);
      nnztot += info.nz_used;
      PetscPrintf(wcomm,"\t\t[%d]%s %d) N=%d, n data cols=%d, nnz/row (ave)=%d, %d active pes\n",
                  mype,__FUNCT__,(int)level1,M,pc_gamg->data_cell_cols,
                  (int)(info.nz_used/(PetscReal)NN), nactivepe );
    }

    /* stop if one node -- could pull back for singular problems */
    if( M/pc_gamg->data_cell_cols < 2 ) {
      level++;
      break;
    }
#if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES)
    ierr = PetscLogStagePop(); CHKERRQ( ierr );
#endif
  } /* levels */

  if( pc_gamg->data ) {
    ierr = PetscFree( pc_gamg->data ); CHKERRQ( ierr );
    pc_gamg->data = PETSC_NULL;
  }

  if (pc_gamg->verbose) PetscPrintf(wcomm,"\t[%d]%s %d levels, grid complexity = %g\n",0,__FUNCT__,level+1,nnztot/nnz0);
  pc_gamg->Nlevels = level + 1;
  fine_level = level;
  ierr = PCMGSetLevels(pc,pc_gamg->Nlevels,PETSC_NULL);CHKERRQ(ierr);

  /* simple setup (disabled) */
  if( !PETSC_TRUE ){
    PC_MG_Levels **mglevels = mg->levels;
    for (lidx=0,level=pc_gamg->Nlevels-1;
         lidx<fine_level;
         lidx++, level--){
      ierr = PCMGSetInterpolation( pc, lidx+1, Parr[level] );CHKERRQ(ierr);
      ierr = KSPSetOperators( mglevels[lidx]->smoothd, Aarr[level], Aarr[level], SAME_NONZERO_PATTERN );CHKERRQ(ierr);
      ierr = MatDestroy( &Parr[level] ); CHKERRQ(ierr);
      ierr = MatDestroy( &Aarr[level] ); CHKERRQ(ierr);
    }
    ierr = KSPSetOperators( mglevels[fine_level]->smoothd, Aarr[0], Aarr[0], SAME_NONZERO_PATTERN ); CHKERRQ(ierr);

    ierr = PCSetUp_MG( pc ); CHKERRQ( ierr );
  }
  else if( pc_gamg->Nlevels > 1 ) { /* don't setup MG if one level */
    /* set default smoothers & set operators */
    for ( lidx = 1, level = pc_gamg->Nlevels-2;
          lidx <= fine_level;
          lidx++, level--) {
      KSP smoother;
      PC  subpc;

      ierr = PCMGGetSmoother( pc, lidx, &smoother ); CHKERRQ(ierr);
      ierr = KSPGetPC( smoother, &subpc ); CHKERRQ(ierr);

      ierr = KSPSetNormType( smoother, KSP_NORM_NONE ); CHKERRQ(ierr);
      /* set ops */
      ierr = KSPSetOperators( smoother, Aarr[level], Aarr[level], SAME_NONZERO_PATTERN ); CHKERRQ(ierr);
      ierr = PCMGSetInterpolation( pc, lidx, Parr[level+1] );CHKERRQ(ierr);

      /* create field split PC, get subsmoother */
      if( stokes ) {
        KSP *ksps;
        PetscInt nn;
        ierr = PCFieldSplitSetIS(subpc,"0",kktMatsArr[level].prim_is); CHKERRQ(ierr);
        ierr = PCFieldSplitSetIS(subpc,"1",kktMatsArr[level].constr_is); CHKERRQ(ierr);
        ierr = PCFieldSplitGetSubKSP(subpc,&nn,&ksps); CHKERRQ(ierr);
        smoother = ksps[0];
        ierr = KSPGetPC( smoother, &subpc ); CHKERRQ(ierr);
        ierr = PetscFree( ksps ); CHKERRQ(ierr);
      }
      ierr = GAMGKKTMatDestroy( &kktMatsArr[level] ); CHKERRQ(ierr);

      /* set defaults */
      ierr = KSPSetType( smoother, KSPCHEBYCHEV );CHKERRQ(ierr);

      /* override defaults and command line args (!) */
      if ( pc_gamg->use_aggs_in_gasm ) {
        PetscInt sz;
        IS       *is;

        sz = nASMBlocksArr[level];
        is = ASMLocalIDsArr[level];
        ierr = PCSetType( subpc, PCGASM ); CHKERRQ(ierr);
        if(sz==0){
          /* no aggregates on this process; use a singleton subdomain */
          IS       is;
          PetscInt my0,kk;
          ierr = MatGetOwnershipRange( Aarr[level], &my0, &kk ); CHKERRQ(ierr);
          ierr = ISCreateGeneral(PETSC_COMM_SELF, 1, &my0, PETSC_COPY_VALUES, &is ); CHKERRQ(ierr);
          ierr = PCGASMSetLocalSubdomains( subpc, 1, &is, PETSC_NULL ); CHKERRQ(ierr);
          ierr = ISDestroy( &is ); CHKERRQ(ierr);
        }
        else {
          PetscInt kk;
          ierr = PCGASMSetLocalSubdomains( subpc, sz, is, PETSC_NULL ); CHKERRQ(ierr);
          for(kk=0;kk<sz;kk++){
            ierr = ISDestroy( &is[kk] ); CHKERRQ(ierr);
          }
          ierr = PetscFree( is ); CHKERRQ(ierr);
        }
        ierr = PCGASMSetOverlap( subpc, 0 ); CHKERRQ(ierr);

        ASMLocalIDsArr[level] = PETSC_NULL;
        nASMBlocksArr[level] = 0;
        ierr = PCGASMSetType( subpc, PC_GASM_BASIC ); CHKERRQ(ierr);
      }
      else {
        ierr = PCSetType( subpc, PCJACOBI ); CHKERRQ(ierr);
      }
    }
    {
      /* coarse grid: block Jacobi with a local direct (LU) solve */
      KSP smoother,*k2; PC subpc,pc2; PetscInt ii,first;
      Mat Lmat = Aarr[(level=pc_gamg->Nlevels-1)]; lidx = 0;
      ierr = PCMGGetSmoother( pc, lidx, &smoother ); CHKERRQ(ierr);
      ierr = KSPSetOperators( smoother, Lmat, Lmat, SAME_NONZERO_PATTERN ); CHKERRQ(ierr);
      ierr = KSPSetNormType( smoother, KSP_NORM_NONE ); CHKERRQ(ierr);
      ierr = KSPGetPC( smoother, &subpc ); CHKERRQ(ierr);
      ierr = PCSetType( subpc, PCBJACOBI ); CHKERRQ(ierr);
      ierr = PCSetUp( subpc ); CHKERRQ(ierr);
      ierr = PCBJacobiGetSubKSP(subpc,&ii,&first,&k2);CHKERRQ(ierr); assert(ii==1);
      ierr = KSPGetPC(k2[0],&pc2);CHKERRQ(ierr);
      ierr = PCSetType( pc2, PCLU ); CHKERRQ(ierr);
    }

    /* should be called in PCSetFromOptions_GAMG(), but cannot be called prior to PCMGSetLevels() */
    ierr = PetscObjectOptionsBegin( (PetscObject)pc );CHKERRQ(ierr);
    ierr = PCSetFromOptions_MG( pc ); CHKERRQ(ierr);
    ierr = PetscOptionsEnd(); CHKERRQ(ierr);
    if (mg->galerkin != 2) SETERRQ(wcomm,PETSC_ERR_USER,"GAMG does Galerkin manually so the -pc_mg_galerkin option must not be used.");

    /* create cheby smoothers */
    for ( lidx = 1, level = pc_gamg->Nlevels-2;
          lidx <= fine_level;
          lidx++, level--) {
      KSP       smoother;
      PetscBool flag;
      PC        subpc;

      ierr = PCMGGetSmoother( pc, lidx, &smoother ); CHKERRQ(ierr);
      ierr = KSPGetPC( smoother, &subpc ); CHKERRQ(ierr);

      /* create field split PC, get subsmoother */
      if( stokes ) {
        KSP *ksps;
        PetscInt nn;
        ierr = PCFieldSplitGetSubKSP(subpc,&nn,&ksps); CHKERRQ(ierr);
        smoother = ksps[0];
        ierr = KSPGetPC( smoother, &subpc ); CHKERRQ(ierr);
        ierr = PetscFree( ksps ); CHKERRQ(ierr);
      }

      /* do my own cheby */
      ierr = PetscObjectTypeCompare( (PetscObject)smoother, KSPCHEBYCHEV, &flag ); CHKERRQ(ierr);
      if( flag ) {
        PetscReal emax, emin;
        ierr = PetscObjectTypeCompare( (PetscObject)subpc, PCJACOBI, &flag ); CHKERRQ(ierr);
        if( flag && emaxs[level] > 0.0 ) emax=emaxs[level]; /* eigen estimate only for diagonal PC */
        else{ /* eigen estimate 'emax' */
          KSP eksp; Mat Lmat = Aarr[level];
          Vec bb, xx;

          ierr = MatGetVecs( Lmat, &bb, 0 ); CHKERRQ(ierr);
          ierr = MatGetVecs( Lmat, &xx, 0 ); CHKERRQ(ierr);
          {
            PetscRandom rctx;
            ierr = PetscRandomCreate(wcomm,&rctx);CHKERRQ(ierr);
            ierr = PetscRandomSetFromOptions(rctx);CHKERRQ(ierr);
            ierr = VecSetRandom(bb,rctx);CHKERRQ(ierr);
            ierr = PetscRandomDestroy( &rctx ); CHKERRQ(ierr);
          }

          if( removedEqs[level] ) {
            /* being very careful - zeroing out BC rows (this is not done in agg.c estimates) */
            PetscScalar    *zeros;
            PetscInt       ii,jj, *idx_bs, sz, bs=level_bs[level];
            const PetscInt *idx;
            ierr = ISGetLocalSize( removedEqs[level], &sz ); CHKERRQ(ierr);
            ierr = PetscMalloc( bs*sz*sizeof(PetscScalar), &zeros ); CHKERRQ(ierr);
            for(ii=0;ii<bs*sz;ii++) zeros[ii] = 0.;
            ierr = PetscMalloc( bs*sz*sizeof(PetscInt), &idx_bs ); CHKERRQ(ierr);
            ierr = ISGetIndices( removedEqs[level], &idx); CHKERRQ(ierr);
            /* expand node indices to all 'bs' equations of each removed node */
            for(ii=0;ii<sz;ii++) {
              for(jj=0;jj<bs;jj++) {
                idx_bs[ii*bs+jj] = bs*idx[ii]+jj;
              }
            }
            ierr = ISRestoreIndices( removedEqs[level], &idx ); CHKERRQ(ierr);
            if( sz > 0 ) {
              ierr = VecSetValues( bb, bs*sz, idx_bs, zeros, INSERT_VALUES ); CHKERRQ(ierr);
            }
            ierr = PetscFree( idx_bs ); CHKERRQ(ierr);
            ierr = PetscFree( zeros ); CHKERRQ(ierr);
            ierr = VecAssemblyBegin(bb); CHKERRQ(ierr);
            ierr = VecAssemblyEnd(bb); CHKERRQ(ierr);
          }
          ierr = KSPCreate( wcomm, &eksp );CHKERRQ(ierr);
          ierr = KSPSetTolerances( eksp, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT, 10 );CHKERRQ(ierr);
          ierr = KSPSetNormType( eksp, KSP_NORM_NONE ); CHKERRQ(ierr);
          ierr = KSPSetOptionsPrefix(eksp,((PetscObject)pc)->prefix);CHKERRQ(ierr);
          ierr = KSPAppendOptionsPrefix( eksp, "gamg_est_"); CHKERRQ(ierr);
          ierr = KSPSetFromOptions( eksp ); CHKERRQ(ierr);

          ierr = KSPSetInitialGuessNonzero( eksp, PETSC_FALSE ); CHKERRQ(ierr);
          ierr = KSPSetOperators( eksp, Lmat, Lmat, SAME_NONZERO_PATTERN ); CHKERRQ( ierr );
          ierr = KSPSetComputeSingularValues( eksp,PETSC_TRUE ); CHKERRQ(ierr);

          /* set PC type to be same as smoother */
          ierr = KSPSetPC( eksp, subpc ); CHKERRQ( ierr );

          /* solve - keep stuff out of logging */
          ierr = PetscLogEventDeactivate(KSP_Solve);CHKERRQ(ierr);
          ierr = PetscLogEventDeactivate(PC_Apply);CHKERRQ(ierr);
          ierr = KSPSolve( eksp, bb, xx ); CHKERRQ(ierr);
          ierr = PetscLogEventActivate(KSP_Solve);CHKERRQ(ierr);
          ierr = PetscLogEventActivate(PC_Apply);CHKERRQ(ierr);

          ierr = KSPComputeExtremeSingularValues( eksp, &emax, &emin ); CHKERRQ(ierr);

          ierr = VecDestroy( &xx ); CHKERRQ(ierr);
          ierr = VecDestroy( &bb ); CHKERRQ(ierr);
          ierr = KSPDestroy( &eksp ); CHKERRQ(ierr);

          if( pc_gamg->verbose > 0 ) {
            PetscInt N1, tt;
            ierr = MatGetSize( Aarr[level], &N1, &tt ); CHKERRQ(ierr);
            PetscPrintf(wcomm,"\t\t\t%s PC setup max eigen=%e min=%e on level %d (N=%d)\n",__FUNCT__,emax,emin,lidx,N1);
          }
        }
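        /*
           The block below turns the measured largest eigenvalue into a
           Chebyshev target interval [emin, 1.05*emax]: emin is taken as emax
           divided by the coarsening ratio N1/N0 (fine size over coarse size),
           a rough guess that the smoother only needs to damp the part of the
           spectrum the coarse grid cannot represent.
        */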
        {
          PetscInt N1, N0, tt;
          ierr = MatGetSize( Aarr[level], &N1, &tt ); CHKERRQ(ierr);
          ierr = MatGetSize( Aarr[level+1], &N0, &tt ); CHKERRQ(ierr);
          /* heuristic - may need refinement */
          emin = 1.*emax/((PetscReal)N1/(PetscReal)N0);
          emax *= 1.05;
        }
        ierr = KSPChebychevSetEigenvalues( smoother, emax, emin );CHKERRQ(ierr);
      } /* setup cheby flag */

      if( removedEqs[level] ) {
        ierr = ISDestroy( &removedEqs[level] ); CHKERRQ(ierr);
      }
    } /* non-coarse levels */

    /* clean up */
    for(level=1;level<pc_gamg->Nlevels;level++){
      ierr = MatDestroy( &Parr[level] ); CHKERRQ(ierr);
      ierr = MatDestroy( &Aarr[level] ); CHKERRQ(ierr);
    }

    ierr = PCSetUp_MG( pc );CHKERRQ( ierr );

    if( PETSC_FALSE ){
      KSP smoother; /* PCSetUp_MG seems to insist on setting this to GMRES on coarse grid */
      ierr = PCMGGetSmoother( pc, 0, &smoother ); CHKERRQ(ierr);
      ierr = KSPSetType( smoother, KSPPREONLY ); CHKERRQ(ierr);
    }
  }
  else {
    KSP smoother;
    if (pc_gamg->verbose) PetscPrintf(wcomm,"\t[%d]%s one level solver used (system is seen as DD). Using default solver.\n",mype,__FUNCT__);
    ierr = PCMGGetSmoother( pc, 0, &smoother ); CHKERRQ(ierr);
    ierr = KSPSetOperators( smoother, Aarr[0], Aarr[0], SAME_NONZERO_PATTERN ); CHKERRQ(ierr);
    ierr = KSPSetType( smoother, KSPPREONLY ); CHKERRQ(ierr);
    ierr = PCSetUp_MG( pc );CHKERRQ( ierr );
  }

  PetscFunctionReturn(0);
}

/* ------------------------------------------------------------------------- */
/*
   PCDestroy_GAMG - Destroys the private context for the GAMG preconditioner
   that was created with PCCreate_GAMG().

   Input Parameter:
.  pc - the preconditioner context

   Application Interface Routine: PCDestroy()
*/
#undef __FUNCT__
#define __FUNCT__ "PCDestroy_GAMG"
PetscErrorCode PCDestroy_GAMG( PC pc )
{
  PetscErrorCode ierr;
  PC_MG          *mg = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg= (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  ierr = PCReset_GAMG( pc );CHKERRQ(ierr);
  ierr = PetscFree( pc_gamg );CHKERRQ(ierr);
  ierr = PCDestroy_MG( pc );CHKERRQ(ierr);
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetProcEqLim"
/*@
   PCGAMGSetProcEqLim - Set number of equations to aim for on coarse grids via
   processor reduction.

   Not Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - the number of equations

   Options Database Key:
.  -pc_gamg_process_eq_limit

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetProcEqLim(PC pc, PetscInt n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetProcEqLim_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetProcEqLim_GAMG"
PetscErrorCode PCGAMGSetProcEqLim_GAMG(PC pc, PetscInt n)
{
  PC_MG   *mg = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  if(n>0) pc_gamg->min_eq_proc = n;
  PetscFunctionReturn(0);
}
EXTERN_C_END
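/*
   A minimal usage sketch (assuming an existing PC 'pc' of type PCGAMG):

     ierr = PCGAMGSetProcEqLim( pc, 1000 );CHKERRQ(ierr);
     // equivalent to the runtime option: -pc_gamg_process_eq_limit 1000

   Larger values fold coarse grids onto fewer MPI processes sooner.
*/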
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetCoarseEqLim"
/*@
   PCGAMGSetCoarseEqLim - Set max number of equations on coarse grids.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - the maximum number of equations

   Options Database Key:
.  -pc_gamg_coarse_eq_limit

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetCoarseEqLim(PC pc, PetscInt n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetCoarseEqLim_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetCoarseEqLim_GAMG"
PetscErrorCode PCGAMGSetCoarseEqLim_GAMG(PC pc, PetscInt n)
{
  PC_MG   *mg = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  if(n>0) pc_gamg->coarse_eq_limit = n;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetRepartitioning"
/*@
   PCGAMGSetRepartitioning - Repartition the coarse grids

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - PETSC_TRUE to repartition coarse grids

   Options Database Key:
.  -pc_gamg_repartition

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetRepartitioning(PC pc, PetscBool n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetRepartitioning_C",(PC,PetscBool),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetRepartitioning_GAMG"
PetscErrorCode PCGAMGSetRepartitioning_GAMG(PC pc, PetscBool n)
{
  PC_MG   *mg = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->repart = n;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetUseASMAggs"
/*@
   PCGAMGSetUseASMAggs - Use the aggregates to define the subdomains of the
   GASM smoother.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - PETSC_TRUE to use the aggregates

   Options Database Key:
.  -pc_gamg_use_agg_gasm

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetUseASMAggs(PC pc, PetscBool n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetUseASMAggs_C",(PC,PetscBool),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetUseASMAggs_GAMG"
PetscErrorCode PCGAMGSetUseASMAggs_GAMG(PC pc, PetscBool n)
{
  PC_MG   *mg = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->use_aggs_in_gasm = n;
  PetscFunctionReturn(0);
}
EXTERN_C_END
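/*
   A minimal usage sketch for the boolean knobs above (assuming an existing
   PC 'pc' of type PCGAMG); the runtime equivalents are -pc_gamg_repartition
   and -pc_gamg_use_agg_gasm:

     ierr = PCGAMGSetRepartitioning( pc, PETSC_TRUE );CHKERRQ(ierr);
     ierr = PCGAMGSetUseASMAggs( pc, PETSC_TRUE );CHKERRQ(ierr);
*/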
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetNlevels"
/*@
   PCGAMGSetNlevels - Set the maximum number of levels.

   Not collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - the maximum number of levels

   Options Database Key:
.  -pc_mg_levels

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetNlevels(PC pc, PetscInt n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetNlevels_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetNlevels_GAMG"
PetscErrorCode PCGAMGSetNlevels_GAMG(PC pc, PetscInt n)
{
  PC_MG   *mg = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->Nlevels = n;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetThreshold"
/*@
   PCGAMGSetThreshold - Relative threshold to use for dropping edges in aggregation graph

   Not collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - the threshold value

   Options Database Key:
.  -pc_gamg_threshold

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetThreshold(PC pc, PetscReal n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetThreshold_C",(PC,PetscReal),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetThreshold_GAMG"
PetscErrorCode PCGAMGSetThreshold_GAMG(PC pc, PetscReal n)
{
  PC_MG   *mg = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->threshold = n;
  PetscFunctionReturn(0);
}
EXTERN_C_END
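/*
   A minimal usage sketch (assuming an existing PC 'pc' of type PCGAMG):

     ierr = PCGAMGSetThreshold( pc, 0.05 );CHKERRQ(ierr);
     // equivalent to: -pc_gamg_threshold 0.05

   A larger threshold drops more weak edges from the strength graph; 0.0 keeps
   all edges.  (The default, set in PCCreate_GAMG() below, is 0.001.)
*/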
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetType"
/*@
   PCGAMGSetType - Set solution method - calls sub create method

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  type - the GAMG method (GAMGAGG or GAMGGEO)

   Options Database Key:
.  -pc_gamg_type

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetType( PC pc, const PCGAMGType type )
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetType_C",(PC,const PCGAMGType),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetType_GAMG"
PetscErrorCode PCGAMGSetType_GAMG( PC pc, const PCGAMGType type )
{
  PetscErrorCode ierr,(*r)(PC);

  PetscFunctionBegin;
  ierr = PetscFListFind(GAMGList,((PetscObject)pc)->comm,type,PETSC_FALSE,(PetscVoidStarFunction)&r);CHKERRQ(ierr);

  if (!r) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_UNKNOWN_TYPE,"Unknown GAMG type %s given",type);

  /* call sub create method */
  ierr = (*r)(pc); CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCSetFromOptions_GAMG"
PetscErrorCode PCSetFromOptions_GAMG( PC pc )
{
  PetscErrorCode ierr;
  PC_MG          *mg = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
  PetscBool      flag;
  MPI_Comm       wcomm = ((PetscObject)pc)->comm;

  PetscFunctionBegin;
  ierr = PetscOptionsHead("GAMG options"); CHKERRQ(ierr);
  {
    /* -pc_gamg_verbose */
    ierr = PetscOptionsInt("-pc_gamg_verbose","Verbose (debugging) output for PCGAMG",
                           "none", pc_gamg->verbose,
                           &pc_gamg->verbose, PETSC_NULL );CHKERRQ(ierr);

    /* -pc_gamg_repartition */
    ierr = PetscOptionsBool("-pc_gamg_repartition",
                            "Repartition coarse grids (false)",
                            "PCGAMGSetRepartitioning",
                            pc_gamg->repart,
                            &pc_gamg->repart,
                            &flag);CHKERRQ(ierr);

    /* -pc_gamg_use_agg_gasm */
    ierr = PetscOptionsBool("-pc_gamg_use_agg_gasm",
                            "Use aggregation aggregates for GASM smoother (false)",
                            "PCGAMGSetUseASMAggs",
                            pc_gamg->use_aggs_in_gasm,
                            &pc_gamg->use_aggs_in_gasm,
                            &flag);CHKERRQ(ierr);

    /* -pc_gamg_process_eq_limit */
    ierr = PetscOptionsInt("-pc_gamg_process_eq_limit",
                           "Limit (goal) on number of equations per process on coarse grids",
                           "PCGAMGSetProcEqLim",
                           pc_gamg->min_eq_proc,
                           &pc_gamg->min_eq_proc,
                           &flag );CHKERRQ(ierr);

    /* -pc_gamg_coarse_eq_limit */
    ierr = PetscOptionsInt("-pc_gamg_coarse_eq_limit",
                           "Limit on number of equations for the coarse grid",
                           "PCGAMGSetCoarseEqLim",
                           pc_gamg->coarse_eq_limit,
                           &pc_gamg->coarse_eq_limit,
                           &flag );CHKERRQ(ierr);

    /* -pc_gamg_threshold */
    ierr = PetscOptionsReal("-pc_gamg_threshold",
                            "Relative threshold to use for dropping edges in aggregation graph",
                            "PCGAMGSetThreshold",
                            pc_gamg->threshold,
                            &pc_gamg->threshold,
                            &flag );CHKERRQ(ierr);
    if(flag && pc_gamg->verbose) PetscPrintf(wcomm,"\t[%d]%s threshold set %e\n",0,__FUNCT__,pc_gamg->threshold);

    ierr = PetscOptionsInt("-pc_mg_levels",
                           "Set number of MG levels",
                           "PCGAMGSetNlevels",
                           pc_gamg->Nlevels,
                           &pc_gamg->Nlevels,
                           &flag );CHKERRQ(ierr);
  }
  ierr = PetscOptionsTail();CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
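/*
   Typical command-line usage exercising the options parsed above (a sketch;
   any KSP example, e.g. src/ksp/ksp/examples/tutorials/ex2.c, can stand in
   for './app'):

     mpiexec -n 8 ./app -pc_type gamg -pc_gamg_type agg \
        -pc_gamg_threshold 0.01 -pc_gamg_process_eq_limit 200 \
        -pc_gamg_coarse_eq_limit 1000 -pc_gamg_repartition true \
        -pc_gamg_verbose 1
*/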
/* -------------------------------------------------------------------------- */
/*MC
     PCGAMG - Geometric algebraic multigrid (AMG) preconditioning framework.
       - This is the entry point to GAMG, registered in pcregis.c

   Options Database Keys:
   Multigrid options(inherited)
+  -pc_mg_cycles <1>: 1 for V cycle, 2 for W-cycle (PCMGSetCycleType)
.  -pc_mg_smoothup <1>: Number of post-smoothing steps (PCMGSetNumberSmoothUp)
.  -pc_mg_smoothdown <1>: Number of pre-smoothing steps (PCMGSetNumberSmoothDown)
-  -pc_mg_type <multiplicative>: (one of) additive multiplicative full cascade kascade

   Level: intermediate

   Concepts: multigrid

.seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC, PCMGType,
           PCMGSetLevels(), PCMGGetLevels(), PCMGSetType(), PCMGSetCycleType(), PCMGSetNumberSmoothDown(),
           PCMGSetNumberSmoothUp(), PCMGGetCoarseSolve(), PCMGSetResidual(), PCMGSetInterpolation(),
           PCMGSetRestriction(), PCMGGetSmoother(), PCMGGetSmootherUp(), PCMGGetSmootherDown(),
           PCMGSetCyclesOnLevel(), PCMGSetRhs(), PCMGSetX(), PCMGSetR()
M*/
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCCreate_GAMG"
PetscErrorCode PCCreate_GAMG( PC pc )
{
  PetscErrorCode ierr;
  PC_GAMG        *pc_gamg;
  PC_MG          *mg;
#if defined PETSC_GAMG_USE_LOG
  static long count = 0;
#endif

  PetscFunctionBegin;
  /* PCGAMG is an inherited class of PCMG. Initialize pc as PCMG */
  ierr = PCSetType( pc, PCMG ); CHKERRQ(ierr); /* calls PCCreate_MG() and MGCreate_Private() */
  ierr = PetscObjectChangeTypeName( (PetscObject)pc, PCGAMG ); CHKERRQ(ierr);

  /* create a supporting struct and attach it to pc */
  ierr = PetscNewLog( pc, PC_GAMG, &pc_gamg ); CHKERRQ(ierr);
  mg = (PC_MG*)pc->data;
  mg->galerkin = 2;             /* Use Galerkin, but it is computed externally */
  mg->innerctx = pc_gamg;

  pc_gamg->setup_count = 0;
  /* these should be in subctx but repartitioning needs simple arrays */
  pc_gamg->data_sz = 0;
  pc_gamg->data = 0;

  /* register AMG type */
  if( !GAMGList ){
    ierr = PetscFListAdd(&GAMGList,GAMGGEO,"PCCreateGAMG_GEO",(void(*)(void))PCCreateGAMG_GEO);CHKERRQ(ierr);
    ierr = PetscFListAdd(&GAMGList,GAMGAGG,"PCCreateGAMG_AGG",(void(*)(void))PCCreateGAMG_AGG);CHKERRQ(ierr);
  }

  /* overwrite the pointers of PCMG by the functions of base class PCGAMG */
  pc->ops->setfromoptions = PCSetFromOptions_GAMG;
  pc->ops->setup          = PCSetUp_GAMG;
  pc->ops->reset          = PCReset_GAMG;
  pc->ops->destroy        = PCDestroy_GAMG;

  ierr = PetscObjectComposeFunctionDynamic( (PetscObject)pc,
                                            "PCGAMGSetProcEqLim_C",
                                            "PCGAMGSetProcEqLim_GAMG",
                                            PCGAMGSetProcEqLim_GAMG);CHKERRQ(ierr);

  ierr = PetscObjectComposeFunctionDynamic( (PetscObject)pc,
                                            "PCGAMGSetCoarseEqLim_C",
                                            "PCGAMGSetCoarseEqLim_GAMG",
                                            PCGAMGSetCoarseEqLim_GAMG);CHKERRQ(ierr);

  ierr = PetscObjectComposeFunctionDynamic( (PetscObject)pc,
                                            "PCGAMGSetRepartitioning_C",
                                            "PCGAMGSetRepartitioning_GAMG",
                                            PCGAMGSetRepartitioning_GAMG);CHKERRQ(ierr);

  ierr = PetscObjectComposeFunctionDynamic( (PetscObject)pc,
                                            "PCGAMGSetUseASMAggs_C",
                                            "PCGAMGSetUseASMAggs_GAMG",
                                            PCGAMGSetUseASMAggs_GAMG);CHKERRQ(ierr);

  ierr = PetscObjectComposeFunctionDynamic( (PetscObject)pc,
                                            "PCGAMGSetThreshold_C",
                                            "PCGAMGSetThreshold_GAMG",
                                            PCGAMGSetThreshold_GAMG);CHKERRQ(ierr);

  ierr = PetscObjectComposeFunctionDynamic( (PetscObject)pc,
                                            "PCGAMGSetType_C",
                                            "PCGAMGSetType_GAMG",
                                            PCGAMGSetType_GAMG);CHKERRQ(ierr);
"PCGAMGSetType_GAMG", 1500 PCGAMGSetType_GAMG); 1501 CHKERRQ(ierr); 1502 1503 pc_gamg->repart = PETSC_FALSE; 1504 pc_gamg->use_aggs_in_gasm = PETSC_FALSE; 1505 pc_gamg->min_eq_proc = 100; 1506 pc_gamg->coarse_eq_limit = 800; 1507 pc_gamg->threshold = 0.001; 1508 pc_gamg->Nlevels = GAMG_MAXLEVELS; 1509 pc_gamg->verbose = 0; 1510 pc_gamg->emax_id = -1; 1511 1512 /* private events */ 1513 #if defined PETSC_GAMG_USE_LOG 1514 if( count++ == 0 ) { 1515 PetscLogEventRegister("GAMG: createProl", PC_CLASSID, &petsc_gamg_setup_events[SET1]); 1516 PetscLogEventRegister(" Graph", PC_CLASSID, &petsc_gamg_setup_events[GRAPH]); 1517 /* PetscLogEventRegister(" G.Mat", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_MAT]); */ 1518 /* PetscLogEventRegister(" G.Filter", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_FILTER]); */ 1519 /* PetscLogEventRegister(" G.Square", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_SQR]); */ 1520 PetscLogEventRegister(" MIS/Agg", PC_CLASSID, &petsc_gamg_setup_events[SET4]); 1521 PetscLogEventRegister(" geo: growSupp", PC_CLASSID, &petsc_gamg_setup_events[SET5]); 1522 PetscLogEventRegister(" geo: triangle", PC_CLASSID, &petsc_gamg_setup_events[SET6]); 1523 PetscLogEventRegister(" search&set", PC_CLASSID, &petsc_gamg_setup_events[FIND_V]); 1524 PetscLogEventRegister(" SA: col data", PC_CLASSID, &petsc_gamg_setup_events[SET7]); 1525 PetscLogEventRegister(" SA: frmProl0", PC_CLASSID, &petsc_gamg_setup_events[SET8]); 1526 PetscLogEventRegister(" SA: smooth", PC_CLASSID, &petsc_gamg_setup_events[SET9]); 1527 PetscLogEventRegister("GAMG: partLevel", PC_CLASSID, &petsc_gamg_setup_events[SET2]); 1528 PetscLogEventRegister(" repartition", PC_CLASSID, &petsc_gamg_setup_events[SET12]); 1529 PetscLogEventRegister(" Invert-Sort", PC_CLASSID, &petsc_gamg_setup_events[SET13]); 1530 PetscLogEventRegister(" Move A", PC_CLASSID, &petsc_gamg_setup_events[SET14]); 1531 PetscLogEventRegister(" Move P", PC_CLASSID, &petsc_gamg_setup_events[SET15]); 1532 1533 /* PetscLogEventRegister(" PL move data", PC_CLASSID, &petsc_gamg_setup_events[SET13]); */ 1534 /* PetscLogEventRegister("GAMG: fix", PC_CLASSID, &petsc_gamg_setup_events[SET10]); */ 1535 /* PetscLogEventRegister("GAMG: set levels", PC_CLASSID, &petsc_gamg_setup_events[SET11]); */ 1536 /* create timer stages */ 1537 #if defined GAMG_STAGES 1538 { 1539 char str[32]; 1540 sprintf(str,"MG Level %d (finest)",0); 1541 PetscLogStageRegister(str, &gamg_stages[0]); 1542 PetscInt lidx; 1543 for (lidx=1;lidx<9;lidx++){ 1544 sprintf(str,"MG Level %d",lidx); 1545 PetscLogStageRegister(str, &gamg_stages[lidx]); 1546 } 1547 } 1548 #endif 1549 } 1550 #endif 1551 /* general events */ 1552 #if defined PETSC_USE_LOG 1553 PetscLogEventRegister("PCGAMGgraph_AGG", 0, &PC_GAMGGgraph_AGG); 1554 PetscLogEventRegister("PCGAMGgraph_GEO", PC_CLASSID, &PC_GAMGGgraph_GEO); 1555 PetscLogEventRegister("PCGAMGcoarse_AGG", PC_CLASSID, &PC_GAMGCoarsen_AGG); 1556 PetscLogEventRegister("PCGAMGcoarse_GEO", PC_CLASSID, &PC_GAMGCoarsen_GEO); 1557 PetscLogEventRegister("PCGAMGProl_AGG", PC_CLASSID, &PC_GAMGProlongator_AGG); 1558 PetscLogEventRegister("PCGAMGProl_GEO", PC_CLASSID, &PC_GAMGProlongator_GEO); 1559 PetscLogEventRegister("PCGAMGPOpt_AGG", PC_CLASSID, &PC_GAMGOptprol_AGG); 1560 PetscLogEventRegister("GAMGKKTProl_AGG", PC_CLASSID, &PC_GAMGKKTProl_AGG); 1561 #endif 1562 1563 /* instantiate derived type */ 1564 ierr = PetscOptionsHead("GAMG options"); CHKERRQ(ierr); 1565 { 1566 char tname[256] = GAMGAGG; 1567 ierr = PetscOptionsList("-pc_gamg_type","Type of GAMG method","PCGAMGSetType", 
  /* instantiate derived type */
  ierr = PetscOptionsHead("GAMG options"); CHKERRQ(ierr);
  {
    char tname[256] = GAMGAGG;
    ierr = PetscOptionsList("-pc_gamg_type","Type of GAMG method","PCGAMGSetType",
                            GAMGList, tname, tname, sizeof(tname), PETSC_NULL );CHKERRQ(ierr);
    ierr = PCGAMGSetType( pc, tname ); CHKERRQ(ierr);
  }
  ierr = PetscOptionsTail(); CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
EXTERN_C_END