/*
  GAMG geometric-algebraic multigrid PC - Mark Adams 2011
*/
#include "petsc-private/matimpl.h"
#include <../src/ksp/pc/impls/gamg/gamg.h>        /*I "petscpc.h" I*/
#include <petsc-private/kspimpl.h>

#if defined PETSC_GAMG_USE_LOG
PetscLogEvent petsc_gamg_setup_events[NUM_SET];
#endif

#if defined PETSC_USE_LOG
PetscLogEvent PC_GAMGGgraph_AGG;
PetscLogEvent PC_GAMGGgraph_GEO;
PetscLogEvent PC_GAMGCoarsen_AGG;
PetscLogEvent PC_GAMGCoarsen_GEO;
PetscLogEvent PC_GAMGProlongator_AGG;
PetscLogEvent PC_GAMGProlongator_GEO;
PetscLogEvent PC_GAMGOptprol_AGG;
PetscLogEvent PC_GAMGKKTProl_AGG;
#endif

#define GAMG_MAXLEVELS 30

/* #define GAMG_STAGES */
#if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES)
static PetscLogStage gamg_stages[GAMG_MAXLEVELS];
#endif

static PetscFList GAMGList = 0;

/* ----------------------------------------------------------------------------- */
#undef __FUNCT__
#define __FUNCT__ "PCReset_GAMG"
PetscErrorCode PCReset_GAMG(PC pc)
{
  PetscErrorCode ierr;
  PC_MG          *mg = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  if( pc_gamg->data ) { /* this should not happen, cleaned up in SetUp */
    ierr = PetscFree( pc_gamg->data ); CHKERRQ(ierr);
  }
  pc_gamg->data = PETSC_NULL; pc_gamg->data_sz = 0;
  PetscFunctionReturn(0);
}

/* private 2x2 Mat Nest for Stokes */
typedef struct{
  Mat A11,A21,A12,Amat;
  IS  prim_is,constr_is;
}GAMGKKTMat;

#undef __FUNCT__
#define __FUNCT__ "GAMGKKTMatCreate"
static PetscErrorCode GAMGKKTMatCreate( Mat A, PetscBool iskkt, GAMGKKTMat *out )
{
  PetscFunctionBegin;
  out->Amat = A;
  if( iskkt ){
    PetscErrorCode ierr;
    IS             is_constraint, is_prime;
    PetscInt       nmin,nmax;

    ierr = MatGetOwnershipRange( A, &nmin, &nmax ); CHKERRQ(ierr);
    ierr = MatFindZeroDiagonals( A, &is_constraint ); CHKERRQ(ierr);
    ierr = ISComplement( is_constraint, nmin, nmax, &is_prime ); CHKERRQ(ierr);
    out->prim_is = is_prime;
    out->constr_is = is_constraint;

    ierr = MatGetSubMatrix( A, is_prime, is_prime, MAT_INITIAL_MATRIX, &out->A11); CHKERRQ(ierr);
    ierr = MatGetSubMatrix( A, is_prime, is_constraint, MAT_INITIAL_MATRIX, &out->A12); CHKERRQ(ierr);
    ierr = MatGetSubMatrix( A, is_constraint, is_prime, MAT_INITIAL_MATRIX, &out->A21); CHKERRQ(ierr);
    PetscPrintf(((PetscObject)A)->comm,"[%d]%s N=%d N_11=%d\n",0,__FUNCT__,A->rmap->N,out->A11->rmap->N);
  }
  else {
    out->A11 = A;
    out->A21 = PETSC_NULL;
    out->A12 = PETSC_NULL;
    out->prim_is = PETSC_NULL;
    out->constr_is = PETSC_NULL;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "GAMGKKTMatDestroy"
static PetscErrorCode GAMGKKTMatDestroy( GAMGKKTMat *mat )
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if( mat->A11 && mat->A11 != mat->Amat ) {
    ierr = MatDestroy( &mat->A11 ); CHKERRQ(ierr);
  }
  ierr = MatDestroy( &mat->A21 ); CHKERRQ(ierr);
  ierr = MatDestroy( &mat->A12 ); CHKERRQ(ierr);

  ierr = ISDestroy( &mat->prim_is ); CHKERRQ(ierr);
  ierr = ISDestroy( &mat->constr_is ); CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
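/*
   Example (sketch, illustrative only): the helpers above give a 2x2 view of
   a saddle-point matrix A.  Rows with a zero diagonal become the constraint
   block; the rest form the primal block kkt.A11, with kkt.A12/kkt.A21 the
   coupling blocks and kkt.prim_is/kkt.constr_is the matching index sets:

     GAMGKKTMat kkt;
     ierr = GAMGKKTMatCreate( A, PETSC_TRUE, &kkt ); CHKERRQ(ierr);
     ierr = MatView( kkt.A11, PETSC_VIEWER_STDOUT_WORLD ); CHKERRQ(ierr);
     ierr = GAMGKKTMatDestroy( &kkt ); CHKERRQ(ierr);
*/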
/* -------------------------------------------------------------------------- */
/*
   createLevel: create coarse op with RAP.  Repartition and/or reduce the
     number of active processors.

   Input Parameter:
   . pc - parameters + side effect: coarse data in 'pc_gamg->data' and
          'pc_gamg->data_sz' are changed via repartitioning/reduction.
   . Amat_fine - matrix on this fine (k) level
   . cr_bs - coarse block size
   . isLast - flag for the last (coarsest) level, which is put on one process
   . stokes - flag for Stokes (saddle-point) systems, which carry a 2x2 split
   In/Output Parameter:
   . a_P_inout - prolongation operator to the next level (k-->k-1)
   . a_nactive_proc - number of active procs
   Output Parameter:
   . a_Amat_crs - coarse matrix that is created (k-1)
*/

#undef __FUNCT__
#define __FUNCT__ "createLevel"
static PetscErrorCode createLevel( const PC pc,
                                   const Mat Amat_fine,
                                   const PetscInt cr_bs,
                                   const PetscBool isLast,
                                   const PetscBool stokes,
                                   Mat *a_P_inout,
                                   Mat *a_Amat_crs,
                                   PetscMPIInt *a_nactive_proc
                                   )
{
  PetscErrorCode  ierr;
  PC_MG           *mg = (PC_MG*)pc->data;
  PC_GAMG         *pc_gamg = (PC_GAMG*)mg->innerctx;
  const PetscBool repart = pc_gamg->repart;
  const PetscInt  min_eq_proc = pc_gamg->min_eq_proc, coarse_max = pc_gamg->coarse_eq_limit;
  Mat             Cmat,Pold=*a_P_inout;
  MPI_Comm        wcomm = ((PetscObject)Amat_fine)->comm;
  PetscMPIInt     mype,npe,new_npe,nactive=*a_nactive_proc;
  PetscInt        ncrs_eq,ncrs_prim,f_bs;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank( wcomm, &mype ); CHKERRQ(ierr);
  ierr = MPI_Comm_size( wcomm, &npe ); CHKERRQ(ierr);
  ierr = MatGetBlockSize( Amat_fine, &f_bs ); CHKERRQ(ierr);
  /* RAP */
  ierr = MatPtAP( Amat_fine, Pold, MAT_INITIAL_MATRIX, 2.0, &Cmat ); CHKERRQ(ierr);

  /* set 'ncrs_prim' (nodes), 'ncrs_eq' (equations) */
  ncrs_prim = pc_gamg->data_sz/pc_gamg->data_cell_cols/pc_gamg->data_cell_rows;
  assert(pc_gamg->data_sz%(pc_gamg->data_cell_cols*pc_gamg->data_cell_rows)==0);
  ierr = MatGetLocalSize( Cmat, &ncrs_eq, PETSC_NULL ); CHKERRQ(ierr);

  /* get number of PEs to make active 'new_npe', reduce, can be any integer 1-P */
  {
    PetscInt ncrs_eq_glob,ncrs_eq_ave;
    ierr = MatGetSize( Cmat, &ncrs_eq_glob, PETSC_NULL ); CHKERRQ(ierr);
    ncrs_eq_ave = ncrs_eq_glob/npe;
    new_npe = (PetscMPIInt)((float)ncrs_eq_ave/(float)min_eq_proc + 0.5); /* hardwire min. number of eq/proc */
    if( new_npe == 0 || ncrs_eq_ave < coarse_max ) new_npe = 1;
    else if ( new_npe >= nactive ) new_npe = nactive; /* no change, rare */
    if( isLast ) new_npe = 1;
  }
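  /*
     Worked example of the reduction heuristic above (illustrative numbers):
     with npe = 64, a coarse grid of ncrs_eq_glob = 128000 equations and the
     default min_eq_proc = 100, ncrs_eq_ave = 2000 and
     new_npe = (PetscMPIInt)(2000./100. + 0.5) = 20, i.e. the coarse grid is
     reduced from 64 to 20 active processes.  Had ncrs_eq_ave fallen below
     coarse_eq_limit, new_npe would be forced to 1 (as on the last level).
  */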
  if( !repart && new_npe==nactive ) {
    *a_Amat_crs = Cmat; /* output - no repartitioning or reduction - could bail here */
  }
  else {
    const PetscInt *idx,ndata_rows=pc_gamg->data_cell_rows,ndata_cols=pc_gamg->data_cell_cols,node_data_sz=ndata_rows*ndata_cols;
    PetscInt       *counts,*newproc_idx,ii,jj,kk,strideNew,*tidx,ncrs_prim_new,ncrs_eq_new,nloc_old;
    IS             is_eq_newproc,is_eq_newproc_prim,is_eq_num,is_eq_num_prim,isscat,new_eq_indices;
    VecScatter     vecscat;
    PetscScalar    *array;
    Vec            src_crd, dest_crd;

    nloc_old = ncrs_eq/cr_bs; assert(ncrs_eq%cr_bs==0);
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET12],0,0,0,0);CHKERRQ(ierr);
#endif
    /* make 'is_eq_newproc' */
    ierr = PetscMalloc( npe*sizeof(PetscInt), &counts ); CHKERRQ(ierr);
    if( repart && !stokes ) {
      /* Repartition Cmat_{k} and move columns of P^{k}_{k-1} and coordinates of primal part accordingly */
      Mat adj;

      if (pc_gamg->verbose>0) {
        if (pc_gamg->verbose==1) PetscPrintf(wcomm,"\t[%d]%s repartition: npe (active): %d --> %d, neq = %d\n",mype,__FUNCT__,*a_nactive_proc,new_npe,ncrs_eq);
        else {
          PetscInt n;
          ierr = MPI_Allreduce( &ncrs_eq, &n, 1, MPIU_INT, MPI_SUM, wcomm );CHKERRQ(ierr);
          PetscPrintf(wcomm,"\t[%d]%s repartition: npe (active): %d --> %d, neq = %d\n",mype,__FUNCT__,*a_nactive_proc,new_npe,n);
        }
      }

      /* get 'adj' */
      if( cr_bs == 1 ) {
        ierr = MatConvert( Cmat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj ); CHKERRQ(ierr);
      }
      else{
        /* make a scalar matrix to partition (no Stokes here) */
        Mat               tMat;
        PetscInt          Istart_crs,Iend_crs,ncols,jj,Ii;
        const PetscScalar *vals;
        const PetscInt    *idx;
        PetscInt          *d_nnz, *o_nnz, M, N;
        static PetscInt   llev = 0;

        ierr = PetscMalloc( ncrs_prim*sizeof(PetscInt), &d_nnz ); CHKERRQ(ierr);
        ierr = PetscMalloc( ncrs_prim*sizeof(PetscInt), &o_nnz ); CHKERRQ(ierr);
        ierr = MatGetOwnershipRange( Cmat, &Istart_crs, &Iend_crs ); CHKERRQ(ierr);
        ierr = MatGetSize( Cmat, &M, &N );CHKERRQ(ierr);
        for ( Ii = Istart_crs, jj = 0 ; Ii < Iend_crs ; Ii += cr_bs, jj++ ) {
          ierr = MatGetRow(Cmat,Ii,&ncols,0,0); CHKERRQ(ierr);
          d_nnz[jj] = ncols/cr_bs;
          o_nnz[jj] = ncols/cr_bs;
          ierr = MatRestoreRow(Cmat,Ii,&ncols,0,0); CHKERRQ(ierr);
          if( d_nnz[jj] > ncrs_prim ) d_nnz[jj] = ncrs_prim;
          if( o_nnz[jj] > (M/cr_bs-ncrs_prim) ) o_nnz[jj] = M/cr_bs-ncrs_prim;
        }

        ierr = MatCreate( wcomm, &tMat ); CHKERRQ(ierr);
        ierr = MatSetSizes( tMat, ncrs_prim, ncrs_prim, PETSC_DETERMINE, PETSC_DETERMINE );
        CHKERRQ(ierr);
        ierr = MatSetType(tMat,MATAIJ); CHKERRQ(ierr);
        ierr = MatSeqAIJSetPreallocation(tMat,0,d_nnz);CHKERRQ(ierr);
        ierr = MatMPIAIJSetPreallocation(tMat,0,d_nnz,0,o_nnz);CHKERRQ(ierr);
        ierr = PetscFree( d_nnz ); CHKERRQ(ierr);
        ierr = PetscFree( o_nnz ); CHKERRQ(ierr);

        for ( ii = Istart_crs; ii < Iend_crs; ii++ ) {
          PetscInt dest_row = ii/cr_bs;
          ierr = MatGetRow(Cmat,ii,&ncols,&idx,&vals); CHKERRQ(ierr);
          for( jj = 0 ; jj < ncols ; jj++ ){
            PetscInt    dest_col = idx[jj]/cr_bs;
            PetscScalar v = 1.0;
            ierr = MatSetValues(tMat,1,&dest_row,1,&dest_col,&v,ADD_VALUES); CHKERRQ(ierr);
          }
          ierr = MatRestoreRow(Cmat,ii,&ncols,&idx,&vals); CHKERRQ(ierr);
        }
        ierr = MatAssemblyBegin(tMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
        ierr = MatAssemblyEnd(tMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

        if( llev++ == -1 ) { /* debugging hook, normally dead */
          PetscViewer viewer; char fname[32];
          ierr = PetscSNPrintf(fname,sizeof fname,"part_mat_%D.mat",llev);CHKERRQ(ierr);
          ierr = PetscViewerBinaryOpen(wcomm,fname,FILE_MODE_WRITE,&viewer);CHKERRQ(ierr);
          ierr = MatView( tMat, viewer ); CHKERRQ(ierr);
          ierr = PetscViewerDestroy( &viewer ); CHKERRQ(ierr);
        }

        ierr = MatConvert( tMat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj ); CHKERRQ(ierr);

        ierr = MatDestroy( &tMat ); CHKERRQ(ierr);
      } /* create 'adj' */

      { /* partition: get newproc_idx */
        char            prefix[256];
        const char      *pcpre;
        const PetscInt  *is_idx;
        MatPartitioning mpart;
        IS              proc_is;
        PetscInt        targetPE;

        ierr = MatPartitioningCreate( wcomm, &mpart ); CHKERRQ(ierr);
        ierr = MatPartitioningSetAdjacency( mpart, adj ); CHKERRQ(ierr);
        ierr = PCGetOptionsPrefix( pc, &pcpre );CHKERRQ(ierr);
        ierr = PetscSNPrintf(prefix,sizeof prefix,"%spc_gamg_",pcpre?pcpre:"");CHKERRQ(ierr);
        ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
        ierr = MatPartitioningSetFromOptions( mpart ); CHKERRQ(ierr);
        ierr = MatPartitioningSetNParts( mpart, new_npe );CHKERRQ(ierr);
        ierr = MatPartitioningApply( mpart, &proc_is ); CHKERRQ(ierr);
        ierr = MatPartitioningDestroy( &mpart ); CHKERRQ(ierr);

        /* collect IS info */
        ierr = PetscMalloc( ncrs_eq*sizeof(PetscInt), &newproc_idx ); CHKERRQ(ierr);
        ierr = ISGetIndices( proc_is, &is_idx ); CHKERRQ(ierr);
        targetPE = 1; /* bring to "front" of machine */
        /*targetPE = npe/new_npe;*/ /* spread partitioning across machine */
        for( kk = jj = 0 ; kk < nloc_old ; kk++ ){
          for( ii = 0 ; ii < cr_bs ; ii++, jj++ ){
            newproc_idx[jj] = is_idx[kk] * targetPE; /* distribution */
          }
        }
        ierr = ISRestoreIndices( proc_is, &is_idx ); CHKERRQ(ierr);
        ierr = ISDestroy( &proc_is ); CHKERRQ(ierr);
      }
      ierr = MatDestroy( &adj ); CHKERRQ(ierr);

      ierr = ISCreateGeneral( wcomm, ncrs_eq, newproc_idx, PETSC_COPY_VALUES, &is_eq_newproc );
      CHKERRQ(ierr);
      if( newproc_idx != 0 ) {
        ierr = PetscFree( newproc_idx ); CHKERRQ(ierr);
      }
    } /* repartitioning */
    else { /* simple aggregation of parts -- 'is_eq_newproc' */
      PetscInt rfactor,targetPE;
      /* find factor */
      if( new_npe == 1 ) rfactor = npe; /* easy */
      else {
        PetscReal best_fact = 0.;
        jj = -1;
        for( kk = 1 ; kk <= npe ; kk++ ){
          if( npe%kk==0 ) { /* a candidate */
            PetscReal nactpe = (PetscReal)npe/(PetscReal)kk, fact = nactpe/(PetscReal)new_npe;
            if(fact > 1.0) fact = 1./fact; /* keep fact < 1 */
            if( fact > best_fact ) {
              best_fact = fact; jj = kk;
            }
          }
        }
        if( jj != -1 ) rfactor = jj;
        else rfactor = 1; /* does this happen? (would need npe prime) */
      }
      new_npe = npe/rfactor;
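      /*
         Worked example of the divisor search above (illustrative numbers):
         with npe = 12 and new_npe = 5, the candidates kk are the divisors
         of 12; kk = 2 gives nactpe = 6 and fact = 5/6 ~ 0.83, kk = 3 gives
         nactpe = 4 and fact = 4/5 = 0.80, so rfactor = 2 wins and new_npe
         becomes 12/2 = 6, the divisor-induced process count closest to the
         requested one.
      */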
      if( new_npe==nactive ) {
        *a_Amat_crs = Cmat; /* output - no repartitioning or reduction, bail out because nested here */
        ierr = PetscFree( counts ); CHKERRQ(ierr);
        if (pc_gamg->verbose>0){
          PetscPrintf(wcomm,"\t[%d]%s aggregate processors noop: new_npe=%d, neq(loc)=%d\n",mype,__FUNCT__,new_npe,ncrs_eq);
        }
        PetscFunctionReturn(0);
      }

      if (pc_gamg->verbose) PetscPrintf(wcomm,"\t[%d]%s number of equations (loc) %d with simple aggregation\n",mype,__FUNCT__,ncrs_eq);
      targetPE = mype/rfactor;
      ierr = ISCreateStride( wcomm, ncrs_eq, targetPE, 0, &is_eq_newproc ); CHKERRQ(ierr);

      if( stokes ) {
        ierr = ISCreateStride( wcomm, ncrs_prim*cr_bs, targetPE, 0, &is_eq_newproc_prim ); CHKERRQ(ierr);
      }
    } /* end simple 'is_eq_newproc' */

    /*
      Create an index set from the is_eq_newproc index set to indicate the mapping TO
    */
    ierr = ISPartitioningToNumbering( is_eq_newproc, &is_eq_num ); CHKERRQ(ierr);
    if( stokes ) {
      ierr = ISPartitioningToNumbering( is_eq_newproc_prim, &is_eq_num_prim ); CHKERRQ(ierr);
    }
    else is_eq_num_prim = is_eq_num;
    /*
      Determine how many equations/vertices are assigned to each processor
    */
    ierr = ISPartitioningCount( is_eq_newproc, npe, counts ); CHKERRQ(ierr);
    ncrs_eq_new = counts[mype];
    ierr = ISDestroy( &is_eq_newproc ); CHKERRQ(ierr);
    if( stokes ) {
      ierr = ISPartitioningCount( is_eq_newproc_prim, npe, counts ); CHKERRQ(ierr);
      ierr = ISDestroy( &is_eq_newproc_prim ); CHKERRQ(ierr);
      ncrs_prim_new = counts[mype]/cr_bs; /* nodes */
    }
    else ncrs_prim_new = ncrs_eq_new/cr_bs; /* eqs */

    ierr = PetscFree( counts ); CHKERRQ(ierr);
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET12],0,0,0,0); CHKERRQ(ierr);
#endif

    /* move data (for primal equations only) */
    /* Create a vector to contain the newly ordered element information */
    ierr = VecCreate( wcomm, &dest_crd ); CHKERRQ(ierr);
    ierr = VecSetSizes( dest_crd, node_data_sz*ncrs_prim_new, PETSC_DECIDE ); CHKERRQ(ierr);
    ierr = VecSetFromOptions( dest_crd ); CHKERRQ(ierr); /* this is needed! */
    /*
      There are 'ndata_rows*ndata_cols' data items per node (one can think of
      the vectors as having a block size of ...).  Note, ISs are expanded
      into equation space by 'cr_bs'.
    */
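    /*
       Illustrative layout (sketch): with ndata_rows = 1 and ndata_cols = 3
       (say, three near-null-space vectors), node_data_sz = 3 and
       stride0 = ncrs_prim.  pc_gamg->data stores vector jj contiguously
       (ix = ii + jj*stride0) while the scatter vector interleaves values
       per node (jx = 3*ii + jj); e.g. node ii = 2, vector jj = 1 sits at
       data[2 + ncrs_prim] and at src_crd entry 7.
    */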
    ierr = PetscMalloc( (ncrs_prim*node_data_sz)*sizeof(PetscInt), &tidx ); CHKERRQ(ierr);
    ierr = ISGetIndices( is_eq_num_prim, &idx ); CHKERRQ(ierr);
    for(ii=0,jj=0; ii<ncrs_prim ; ii++) {
      PetscInt id = idx[ii*cr_bs]/cr_bs; /* get node back */
      for( kk=0; kk<node_data_sz ; kk++, jj++) tidx[jj] = id*node_data_sz + kk;
    }
    ierr = ISRestoreIndices( is_eq_num_prim, &idx ); CHKERRQ(ierr);
    ierr = ISCreateGeneral( wcomm, node_data_sz*ncrs_prim, tidx, PETSC_COPY_VALUES, &isscat );
    CHKERRQ(ierr);
    ierr = PetscFree( tidx ); CHKERRQ(ierr);
    /*
      Create a vector to contain the original vertex information for each element
    */
    ierr = VecCreateSeq( PETSC_COMM_SELF, node_data_sz*ncrs_prim, &src_crd ); CHKERRQ(ierr);
    for( jj=0; jj<ndata_cols ; jj++ ) {
      const PetscInt stride0=ncrs_prim*pc_gamg->data_cell_rows;
      for( ii=0 ; ii<ncrs_prim ; ii++) {
        for( kk=0; kk<ndata_rows ; kk++ ) {
          PetscInt    ix = ii*ndata_rows + kk + jj*stride0, jx = ii*node_data_sz + kk*ndata_cols + jj;
          PetscScalar tt = (PetscScalar)pc_gamg->data[ix];
          ierr = VecSetValues( src_crd, 1, &jx, &tt, INSERT_VALUES ); CHKERRQ(ierr);
        }
      }
    }
    ierr = VecAssemblyBegin(src_crd); CHKERRQ(ierr);
    ierr = VecAssemblyEnd(src_crd); CHKERRQ(ierr);
    /*
      Scatter the element vertex information (still in the original vertex ordering)
      to the correct processor
    */
    ierr = VecScatterCreate( src_crd, PETSC_NULL, dest_crd, isscat, &vecscat);
    CHKERRQ(ierr);
    ierr = ISDestroy( &isscat ); CHKERRQ(ierr);
    ierr = VecScatterBegin(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterDestroy( &vecscat ); CHKERRQ(ierr);
    ierr = VecDestroy( &src_crd ); CHKERRQ(ierr);
    /*
      Put the element vertex data into a new allocation of the gdata->ele
    */
    ierr = PetscFree( pc_gamg->data ); CHKERRQ(ierr);
    ierr = PetscMalloc( node_data_sz*ncrs_prim_new*sizeof(PetscReal), &pc_gamg->data ); CHKERRQ(ierr);
    pc_gamg->data_sz = node_data_sz*ncrs_prim_new;
    strideNew = ncrs_prim_new*ndata_rows;
    ierr = VecGetArray( dest_crd, &array ); CHKERRQ(ierr);
    for( jj=0; jj<ndata_cols ; jj++ ) {
      for( ii=0 ; ii<ncrs_prim_new ; ii++) {
        for( kk=0; kk<ndata_rows ; kk++ ) {
          PetscInt ix = ii*ndata_rows + kk + jj*strideNew, jx = ii*node_data_sz + kk*ndata_cols + jj;
          pc_gamg->data[ix] = PetscRealPart(array[jx]);
        }
      }
    }
    ierr = VecRestoreArray( dest_crd, &array ); CHKERRQ(ierr);
    ierr = VecDestroy( &dest_crd ); CHKERRQ(ierr);

    /* move A and P (columns) with new layout */
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET13],0,0,0,0);CHKERRQ(ierr);
#endif

    /*
      Invert for MatGetSubMatrix
    */
    ierr = ISInvertPermutation( is_eq_num, ncrs_eq_new, &new_eq_indices ); CHKERRQ(ierr);
    ierr = ISSort( new_eq_indices ); CHKERRQ(ierr); /* is this needed? */
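    /*
       Sketch of the inversion above: if is_eq_num maps old equation i to
       new global equation num[i], e.g. num = {2,0,1}, then
       ISInvertPermutation gives inv = {1,2,0}: entry j of the inverse is
       the old index that lands at new position j, which is exactly the
       row/column selection MatGetSubMatrix needs below to apply the new
       ordering to Cmat and to the columns of Pold.
    */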
    ierr = ISSetBlockSize( new_eq_indices, cr_bs ); CHKERRQ(ierr);
    if(is_eq_num != is_eq_num_prim) {
      ierr = ISDestroy( &is_eq_num_prim ); CHKERRQ(ierr); /* could be same as 'is_eq_num' */
    }
    ierr = ISDestroy( &is_eq_num ); CHKERRQ(ierr);
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET13],0,0,0,0);CHKERRQ(ierr);
    ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET14],0,0,0,0);CHKERRQ(ierr);
#endif
    /* 'a_Amat_crs' output */
    {
      Mat mat;
      ierr = MatGetSubMatrix( Cmat, new_eq_indices, new_eq_indices, MAT_INITIAL_MATRIX, &mat );
      CHKERRQ(ierr);
      *a_Amat_crs = mat;

      if(!PETSC_TRUE){ /* debugging, normally dead */
        PetscInt cbs, rbs;
        ierr = MatGetBlockSizes( Cmat, &rbs, &cbs ); CHKERRQ(ierr);
        PetscPrintf(MPI_COMM_SELF,"[%d]%s Old Mat rbs=%d cbs=%d\n",mype,__FUNCT__,rbs,cbs);
        ierr = MatGetBlockSizes( mat, &rbs, &cbs ); CHKERRQ(ierr);
        PetscPrintf(MPI_COMM_SELF,"[%d]%s New Mat rbs=%d cbs=%d cr_bs=%d\n",mype,__FUNCT__,rbs,cbs,cr_bs);
      }
    }
    ierr = MatDestroy( &Cmat ); CHKERRQ(ierr);

#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET14],0,0,0,0);CHKERRQ(ierr);
#endif
    /* prolongator */
    {
      IS       findices;
      PetscInt Istart,Iend;
      Mat      Pnew;
      ierr = MatGetOwnershipRange( Pold, &Istart, &Iend ); CHKERRQ(ierr);
#if defined PETSC_GAMG_USE_LOG
      ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET15],0,0,0,0);CHKERRQ(ierr);
#endif
      ierr = ISCreateStride(wcomm,Iend-Istart,Istart,1,&findices); CHKERRQ(ierr);
      ierr = ISSetBlockSize(findices,f_bs); CHKERRQ(ierr);
      ierr = MatGetSubMatrix( Pold, findices, new_eq_indices, MAT_INITIAL_MATRIX, &Pnew );
      CHKERRQ(ierr);
      ierr = ISDestroy( &findices ); CHKERRQ(ierr);

      if(!PETSC_TRUE){ /* debugging, normally dead */
        PetscInt cbs, rbs;
        ierr = MatGetBlockSizes( Pold, &rbs, &cbs ); CHKERRQ(ierr);
        PetscPrintf(MPI_COMM_SELF,"[%d]%s Pold rbs=%d cbs=%d\n",mype,__FUNCT__,rbs,cbs);
        ierr = MatGetBlockSizes( Pnew, &rbs, &cbs ); CHKERRQ(ierr);
        PetscPrintf(MPI_COMM_SELF,"[%d]%s Pnew rbs=%d cbs=%d\n",mype,__FUNCT__,rbs,cbs);
      }
#if defined PETSC_GAMG_USE_LOG
      ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET15],0,0,0,0);CHKERRQ(ierr);
#endif
      ierr = MatDestroy( a_P_inout ); CHKERRQ(ierr);

      /* output - repartitioned */
      *a_P_inout = Pnew;
    }
    ierr = ISDestroy( &new_eq_indices ); CHKERRQ(ierr);

    *a_nactive_proc = new_npe; /* output */
  }

  /* output matrix data */
  if( !PETSC_TRUE ) { /* debugging, normally dead */
    PetscViewer viewer; char fname[32]; static int llev=0; Cmat = *a_Amat_crs;
    if(llev==0) {
      sprintf(fname,"Cmat_%d.m",llev++);
      ierr = PetscViewerASCIIOpen(wcomm,fname,&viewer);CHKERRQ(ierr);
      ierr = PetscViewerSetFormat( viewer, PETSC_VIEWER_ASCII_MATLAB); CHKERRQ(ierr);
      ierr = MatView(Amat_fine, viewer ); CHKERRQ(ierr);
      ierr = PetscViewerDestroy( &viewer ); CHKERRQ(ierr);
    }
    sprintf(fname,"Cmat_%d.m",llev++);
    ierr = PetscViewerASCIIOpen(wcomm,fname,&viewer);CHKERRQ(ierr);
    ierr = PetscViewerSetFormat( viewer, PETSC_VIEWER_ASCII_MATLAB); CHKERRQ(ierr);
    ierr = MatView(Cmat, viewer ); CHKERRQ(ierr);
    ierr = PetscViewerDestroy( &viewer ); CHKERRQ(ierr);
  }

  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------- */
/*
   PCSetUp_GAMG - Prepares for the use of the GAMG preconditioner
                  by setting data structures and options.

   Input Parameter:
.  pc - the preconditioner context

   Application Interface Routine: PCSetUp()

   Notes:
   The interface routine PCSetUp() is not usually called directly by
   the user, but instead is called by PCApply() if necessary.
*/
#undef __FUNCT__
#define __FUNCT__ "PCSetUp_GAMG"
PetscErrorCode PCSetUp_GAMG( PC pc )
{
  PetscErrorCode ierr;
  PC_MG          *mg = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
  Mat            Pmat = pc->pmat;
  PetscInt       fine_level,level,level1,bs,M,qq,lidx,nASMBlocksArr[GAMG_MAXLEVELS];
  MPI_Comm       wcomm = ((PetscObject)pc)->comm;
  PetscMPIInt    mype,npe,nactivepe;
  Mat            Aarr[GAMG_MAXLEVELS],Parr[GAMG_MAXLEVELS];
  PetscReal      emaxs[GAMG_MAXLEVELS];
  IS             *ASMLocalIDsArr[GAMG_MAXLEVELS],removedEqs[GAMG_MAXLEVELS];
  PetscInt       level_bs[GAMG_MAXLEVELS];
  GAMGKKTMat     kktMatsArr[GAMG_MAXLEVELS];
  PetscLogDouble nnz0=0.,nnztot=0.;
  MatInfo        info;
  PetscBool      stokes = PETSC_FALSE;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(wcomm,&mype);CHKERRQ(ierr);
  ierr = MPI_Comm_size(wcomm,&npe);CHKERRQ(ierr);
  if (pc_gamg->verbose>2) PetscPrintf(wcomm,"[%d]%s pc_gamg->setup_count=%d pc->setupcalled=%d\n",mype,__FUNCT__,pc_gamg->setup_count,pc->setupcalled);
  if( pc_gamg->setup_count++ > 0 ) {
    PC_MG_Levels **mglevels = mg->levels;
    /* just do Galerkin grids */
    Mat B,dA,dB;
    assert(pc->setupcalled);

    if( pc_gamg->Nlevels > 1 ) {
      /* currently only handle case where mat and pmat are the same on coarser levels */
      ierr = KSPGetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,&dA,&dB,PETSC_NULL);CHKERRQ(ierr);
      /* (re)set to get dirty flag */
      ierr = KSPSetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,dA,dB,SAME_NONZERO_PATTERN);CHKERRQ(ierr);

      for (level=pc_gamg->Nlevels-2; level>-1; level--) {
        /* the first time through the matrix structure has changed from repartitioning */
        if( pc_gamg->setup_count==2 /*&& (pc_gamg->repart || level==0)*/) {
          ierr = MatPtAP(dB,mglevels[level+1]->interpolate,MAT_INITIAL_MATRIX,1.0,&B);CHKERRQ(ierr);
          ierr = MatDestroy(&mglevels[level]->A);CHKERRQ(ierr);
          mglevels[level]->A = B;
        }
        else {
          ierr = KSPGetOperators(mglevels[level]->smoothd,PETSC_NULL,&B,PETSC_NULL);CHKERRQ(ierr);
          ierr = MatPtAP(dB,mglevels[level+1]->interpolate,MAT_REUSE_MATRIX,1.0,&B);CHKERRQ(ierr);
        }
        ierr = KSPSetOperators(mglevels[level]->smoothd,B,B,SAME_NONZERO_PATTERN); CHKERRQ(ierr);
        dB = B;
      }
    }

    ierr = PCSetUp_MG( pc );CHKERRQ( ierr );

    /* PCSetUp_MG seems to insist on setting this to GMRES */
    ierr = KSPSetType( mglevels[0]->smoothd, KSPPREONLY ); CHKERRQ(ierr);

    PetscFunctionReturn(0);
  }
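  /*
     The branch above handles repeated set-ups (setup_count > 1): the grid
     hierarchy and prolongators are kept, and only the Galerkin coarse
     operators are refreshed by walking down the hierarchy with

       A_level = P_{level+1}' * A_{level+1} * P_{level+1}   (MatPtAP)

     reusing the coarse matrix structure (MAT_REUSE_MATRIX) except on the
     second set-up, where repartitioning may have changed the nonzero pattern.
  */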
  assert(pc->setupcalled == 0);

  ierr = PetscOptionsGetBool(((PetscObject)pc)->prefix,"-pc_fieldsplit_detect_saddle_point",&stokes,PETSC_NULL);CHKERRQ(ierr);

  ierr = GAMGKKTMatCreate( Pmat, stokes, &kktMatsArr[0] ); CHKERRQ(ierr);

  if( pc_gamg->data == 0 ) {
    if( !pc_gamg->createdefaultdata ){
      SETERRQ(wcomm,PETSC_ERR_LIB,"'createdefaultdata' not set(?) need to support NULL data");
    }
    if( stokes ) {
      SETERRQ(wcomm,PETSC_ERR_LIB,"Need data (eg, PCSetCoordinates) for Stokes problems");
    }
    ierr = pc_gamg->createdefaultdata( pc, kktMatsArr[0].A11 ); CHKERRQ(ierr);
  }

  /* get basic dims */
  if( stokes ) {
    bs = pc_gamg->data_cell_rows; /* this is agg-mg specific */
  }
  else {
    ierr = MatGetBlockSize( Pmat, &bs ); CHKERRQ(ierr);
  }

  ierr = MatGetSize( Pmat, &M, &qq );CHKERRQ(ierr);
  if (pc_gamg->verbose) {
    if(pc_gamg->verbose==1) ierr = MatGetInfo(Pmat,MAT_LOCAL,&info);
    else ierr = MatGetInfo(Pmat,MAT_GLOBAL_SUM,&info);
    CHKERRQ(ierr);
    nnz0 = info.nz_used;
    nnztot = info.nz_used;
    PetscPrintf(wcomm,"\t[%d]%s level %d N=%d, n data rows=%d, n data cols=%d, nnz/row (ave)=%d, np=%d\n",
                mype,__FUNCT__,0,M,pc_gamg->data_cell_rows,pc_gamg->data_cell_cols,
                (int)(nnz0/(PetscReal)M),npe);
  }

  /* Get A_i and R_i */
  for ( level=0, Aarr[0]=Pmat, nactivepe = npe; /* hard wired stopping logic */
        level < (pc_gamg->Nlevels-1) && (level==0 || M>pc_gamg->coarse_eq_limit); /* && (npe==1 || nactivepe>1); */
        level++ ){
    level1 = level + 1;
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET1],0,0,0,0); CHKERRQ(ierr);
#if (defined GAMG_STAGES)
    ierr = PetscLogStagePush(gamg_stages[level]); CHKERRQ( ierr );
#endif
#endif
    /* deal with Stokes, get sub matrices */
    if( level > 0 ) {
      ierr = GAMGKKTMatCreate( Aarr[level], stokes, &kktMatsArr[level] ); CHKERRQ(ierr);
    }
    { /* construct prolongator */
      Mat              Gmat;
      PetscCoarsenData *agg_lists;
      Mat              Prol11,Prol22;

      level_bs[level] = bs;
      ierr = pc_gamg->graph( pc,kktMatsArr[level].A11, &Gmat ); CHKERRQ(ierr);
      ierr = pc_gamg->coarsen( pc, &Gmat, &agg_lists ); CHKERRQ(ierr);
      ierr = pc_gamg->prolongator( pc, kktMatsArr[level].A11, Gmat, agg_lists, &Prol11 ); CHKERRQ(ierr);

      /* could have failed to create new level */
      if( Prol11 ){
        /* get new block size of coarse matrices */
        ierr = MatGetBlockSizes( Prol11, PETSC_NULL, &bs ); CHKERRQ(ierr);

        if( stokes ) {
          if(!pc_gamg->formkktprol) SETERRQ(wcomm,PETSC_ERR_USER,"Stokes not supported by AMG method.");
          /* R A12 == (T = A21 P)'; G = T' T; coarsen G; form plain agg with G */
          ierr = pc_gamg->formkktprol( pc, Prol11, kktMatsArr[level].A21, &Prol22 ); CHKERRQ(ierr);
        }

        if( pc_gamg->optprol ){
          /* smooth */
          ierr = pc_gamg->optprol( pc, kktMatsArr[level].A11, &Prol11 ); CHKERRQ(ierr);
        }

        if( stokes ) {
          IS  is_row[2] = {kktMatsArr[level].prim_is,kktMatsArr[level].constr_is};
          Mat a[4] = {Prol11, PETSC_NULL, PETSC_NULL, Prol22 };
          ierr = MatCreateNest(wcomm,2,is_row, 2, is_row, a, &Parr[level1] ); CHKERRQ(ierr);
        }
        else {
          Parr[level1] = Prol11;
        }
      }
      else Parr[level1] = PETSC_NULL;

      if ( pc_gamg->use_aggs_in_gasm ) {
        ierr = PetscCDGetASMBlocks(agg_lists, level_bs[level], &nASMBlocksArr[level], &ASMLocalIDsArr[level] ); CHKERRQ(ierr);
      }

      ierr = PetscCDGetRemovedIS( agg_lists, &removedEqs[level] ); CHKERRQ(ierr);

      ierr = MatDestroy( &Gmat ); CHKERRQ(ierr);
      ierr = PetscCDDestroy( agg_lists ); CHKERRQ(ierr);
    } /* construct prolongator scope */
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET1],0,0,0,0);CHKERRQ(ierr);
#endif
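    /*
       The scope above is the per-level coarsening pipeline.  The graph,
       coarsen and prolongator hooks (plus the optional optprol/formkktprol)
       are supplied by the GAMG subtype registered in PCCreate_GAMG.  A
       sketch of the sequence for a level k (names illustrative):

         Gmat      <- graph( A_k )                        strength-of-connection graph
         agg_lists <- coarsen( Gmat )                     aggregates (e.g. MIS)
         P_k       <- prolongator( A_k, Gmat, agg_lists ) tentative prolongator
         P_k       <- optprol( A_k, P_k )                 optional improvement (e.g. smoothing)
    */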
    /* cache eigen estimate */
    if( pc_gamg->emax_id != -1 ){
      PetscBool flag;
      ierr = PetscObjectComposedDataGetReal( (PetscObject)kktMatsArr[level].A11, pc_gamg->emax_id, emaxs[level], flag );
      CHKERRQ( ierr );
      if( !flag ) emaxs[level] = -1.;
    }
    else emaxs[level] = -1.;
    if(level==0) Aarr[0] = Pmat; /* use Pmat for finest level setup */
    if( !Parr[level1] ) {
      if (pc_gamg->verbose) PetscPrintf(wcomm,"\t[%d]%s stop gridding, level %d\n",mype,__FUNCT__,level);
      break;
    }
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET2],0,0,0,0);CHKERRQ(ierr);
#endif

    ierr = createLevel( pc, Aarr[level], bs, (PetscBool)(level==pc_gamg->Nlevels-2),
                        stokes, &Parr[level1], &Aarr[level1], &nactivepe );
    CHKERRQ(ierr);

#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET2],0,0,0,0);CHKERRQ(ierr);
#endif
    ierr = MatGetSize( Aarr[level1], &M, &qq );CHKERRQ(ierr);

    if (pc_gamg->verbose > 0){
      PetscInt NN = M;
      if(pc_gamg->verbose==1) {
        ierr = MatGetInfo(Aarr[level1],MAT_LOCAL,&info); CHKERRQ(ierr);
        ierr = MatGetLocalSize( Aarr[level1], &NN, &qq ); CHKERRQ(ierr);
      }
      else {
        ierr = MatGetInfo( Aarr[level1], MAT_GLOBAL_SUM, &info ); CHKERRQ(ierr);
      }
      nnztot += info.nz_used;
      PetscPrintf(wcomm,"\t\t[%d]%s %d) N=%d, n data cols=%d, nnz/row (ave)=%d, %d active pes\n",
                  mype,__FUNCT__,(int)level1,M,pc_gamg->data_cell_cols,
                  (int)(info.nz_used/(PetscReal)NN), nactivepe );
    }

    /* stop if one node -- could pull back for singular problems */
    if( M/pc_gamg->data_cell_cols < 2 ) {
      level++;
      break;
    }
#if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES)
    ierr = PetscLogStagePop(); CHKERRQ( ierr );
#endif
  } /* levels */

  if( pc_gamg->data ) {
    ierr = PetscFree( pc_gamg->data ); CHKERRQ( ierr );
    pc_gamg->data = PETSC_NULL;
  }

  if (pc_gamg->verbose) PetscPrintf(wcomm,"\t[%d]%s %d levels, grid complexity = %g\n",0,__FUNCT__,level+1,nnztot/nnz0);
  pc_gamg->Nlevels = level + 1;
  fine_level = level;
  ierr = PCMGSetLevels(pc,pc_gamg->Nlevels,PETSC_NULL);CHKERRQ(ierr);

  /* simple setup */
  if( !PETSC_TRUE ){ /* debugging path, normally dead */
    PC_MG_Levels **mglevels = mg->levels;
    for (lidx=0,level=pc_gamg->Nlevels-1;
         lidx<fine_level;
         lidx++, level--){
      ierr = PCMGSetInterpolation( pc, lidx+1, Parr[level] );CHKERRQ(ierr);
      ierr = KSPSetOperators( mglevels[lidx]->smoothd, Aarr[level], Aarr[level], SAME_NONZERO_PATTERN );CHKERRQ(ierr);
      ierr = MatDestroy( &Parr[level] ); CHKERRQ(ierr);
      ierr = MatDestroy( &Aarr[level] ); CHKERRQ(ierr);
    }
    ierr = KSPSetOperators( mglevels[fine_level]->smoothd, Aarr[0], Aarr[0], SAME_NONZERO_PATTERN ); CHKERRQ(ierr);

    ierr = PCSetUp_MG( pc ); CHKERRQ( ierr );
  }
  else if( pc_gamg->Nlevels > 1 ) { /* don't setup MG if one level */
    /* set default smoothers & set operators */
    for ( lidx = 1, level = pc_gamg->Nlevels-2;
          lidx <= fine_level;
          lidx++, level--) {
      KSP smoother;
      PC  subpc;

      ierr = PCMGGetSmoother( pc, lidx, &smoother ); CHKERRQ(ierr);
      ierr = KSPGetPC( smoother, &subpc ); CHKERRQ(ierr);

      ierr = KSPSetNormType( smoother, KSP_NORM_NONE ); CHKERRQ(ierr);
      /* set ops */
      ierr = KSPSetOperators( smoother, Aarr[level], Aarr[level], SAME_NONZERO_PATTERN ); CHKERRQ(ierr);
      ierr = PCMGSetInterpolation( pc, lidx, Parr[level+1] );CHKERRQ(ierr);

      /* create field split PC, get subsmoother */
      if( stokes ) {
        KSP      *ksps;
        PetscInt nn;
PCFieldSplitSetIS(subpc,"0",kktMatsArr[level].prim_is); CHKERRQ(ierr); 797 ierr = PCFieldSplitSetIS(subpc,"1",kktMatsArr[level].constr_is); CHKERRQ(ierr); 798 ierr = PCFieldSplitGetSubKSP(subpc,&nn,&ksps); CHKERRQ(ierr); 799 smoother = ksps[0]; 800 ierr = KSPGetPC( smoother, &subpc ); CHKERRQ(ierr); 801 ierr = PetscFree( ksps ); CHKERRQ(ierr); 802 } 803 ierr = GAMGKKTMatDestroy( &kktMatsArr[level] ); CHKERRQ(ierr); 804 805 /* set defaults */ 806 ierr = KSPSetType( smoother, KSPCHEBYSHEV );CHKERRQ(ierr); 807 808 /* override defaults and command line args (!) */ 809 if ( pc_gamg->use_aggs_in_gasm ) { 810 PetscInt sz; 811 IS *is; 812 813 sz = nASMBlocksArr[level]; 814 is = ASMLocalIDsArr[level]; 815 ierr = PCSetType( subpc, PCGASM ); CHKERRQ(ierr); 816 if(sz==0){ 817 IS is; 818 PetscInt my0,kk; 819 ierr = MatGetOwnershipRange( Aarr[level], &my0, &kk ); CHKERRQ(ierr); 820 ierr = ISCreateGeneral(PETSC_COMM_SELF, 1, &my0, PETSC_COPY_VALUES, &is ); CHKERRQ(ierr); 821 ierr = PCGASMSetSubdomains( subpc, 1, &is, PETSC_NULL ); CHKERRQ(ierr); 822 ierr = ISDestroy( &is ); CHKERRQ(ierr); 823 } 824 else { 825 PetscInt kk; 826 ierr = PCGASMSetSubdomains( subpc, sz, is, PETSC_NULL ); CHKERRQ(ierr); 827 for(kk=0;kk<sz;kk++){ 828 ierr = ISDestroy( &is[kk] ); CHKERRQ(ierr); 829 } 830 ierr = PetscFree( is ); CHKERRQ(ierr); 831 } 832 ierr = PCGASMSetOverlap( subpc, 0 ); CHKERRQ(ierr); 833 834 ASMLocalIDsArr[level] = PETSC_NULL; 835 nASMBlocksArr[level] = 0; 836 ierr = PCGASMSetType( subpc, PC_GASM_BASIC ); CHKERRQ(ierr); 837 } 838 else { 839 ierr = PCSetType( subpc, PCJACOBI ); CHKERRQ(ierr); 840 } 841 } 842 { 843 /* coarse grid */ 844 KSP smoother,*k2; PC subpc,pc2; PetscInt ii,first; 845 Mat Lmat = Aarr[(level=pc_gamg->Nlevels-1)]; lidx = 0; 846 ierr = PCMGGetSmoother( pc, lidx, &smoother ); CHKERRQ(ierr); 847 ierr = KSPSetOperators( smoother, Lmat, Lmat, SAME_NONZERO_PATTERN ); CHKERRQ(ierr); 848 ierr = KSPSetNormType( smoother, KSP_NORM_NONE ); CHKERRQ(ierr); 849 ierr = KSPGetPC( smoother, &subpc ); CHKERRQ(ierr); 850 ierr = PCSetType( subpc, PCBJACOBI ); CHKERRQ(ierr); 851 ierr = PCSetUp( subpc ); CHKERRQ(ierr); 852 ierr = PCBJacobiGetSubKSP(subpc,&ii,&first,&k2);CHKERRQ(ierr); assert(ii==1); 853 ierr = KSPGetPC(k2[0],&pc2);CHKERRQ(ierr); 854 ierr = PCSetType( pc2, PCLU ); CHKERRQ(ierr); 855 } 856 857 /* should be called in PCSetFromOptions_GAMG(), but cannot be called prior to PCMGSetLevels() */ 858 ierr = PetscObjectOptionsBegin( (PetscObject)pc );CHKERRQ(ierr); 859 ierr = PCSetFromOptions_MG( pc ); CHKERRQ(ierr); 860 ierr = PetscOptionsEnd(); CHKERRQ(ierr); 861 if (mg->galerkin != 2) SETERRQ(wcomm,PETSC_ERR_USER,"GAMG does Galerkin manually so the -pc_mg_galerkin option must not be used."); 862 863 /* create cheby smoothers */ 864 for ( lidx = 1, level = pc_gamg->Nlevels-2; 865 lidx <= fine_level; 866 lidx++, level--) { 867 KSP smoother; 868 PetscBool flag; 869 PC subpc; 870 871 ierr = PCMGGetSmoother( pc, lidx, &smoother ); CHKERRQ(ierr); 872 ierr = KSPGetPC( smoother, &subpc ); CHKERRQ(ierr); 873 874 /* create field split PC, get subsmoother */ 875 if( stokes ) { 876 KSP *ksps; 877 PetscInt nn; 878 ierr = PCFieldSplitGetSubKSP(subpc,&nn,&ksps); CHKERRQ(ierr); 879 smoother = ksps[0]; 880 ierr = KSPGetPC( smoother, &subpc ); CHKERRQ(ierr); 881 ierr = PetscFree( ksps ); CHKERRQ(ierr); 882 } 883 884 /* do my own cheby */ 885 ierr = PetscObjectTypeCompare( (PetscObject)smoother, KSPCHEBYSHEV, &flag ); CHKERRQ(ierr); 886 if( flag ) { 887 PetscReal emax, emin; 888 ierr = PetscObjectTypeCompare( 
        ierr = PetscObjectTypeCompare( (PetscObject)subpc, PCJACOBI, &flag ); CHKERRQ(ierr);
        if( flag && emaxs[level] > 0.0 ) emax=emaxs[level]; /* eigen estimate only for diagonal PC */
        else{ /* eigen estimate 'emax' */
          KSP eksp; Mat Lmat = Aarr[level];
          Vec bb, xx;

          ierr = MatGetVecs( Lmat, &bb, 0 ); CHKERRQ(ierr);
          ierr = MatGetVecs( Lmat, &xx, 0 ); CHKERRQ(ierr);
          {
            PetscRandom rctx;
            ierr = PetscRandomCreate(wcomm,&rctx);CHKERRQ(ierr);
            ierr = PetscRandomSetFromOptions(rctx);CHKERRQ(ierr);
            ierr = VecSetRandom(bb,rctx);CHKERRQ(ierr);
            ierr = PetscRandomDestroy( &rctx ); CHKERRQ(ierr);
          }

          if( removedEqs[level] ) {
            /* being very careful - zeroing out BC rows (this is not done in agg.c estimates) */
            PetscScalar    *zeros;
            PetscInt       ii,jj, *idx_bs, sz, bs=level_bs[level];
            const PetscInt *idx;
            ierr = ISGetLocalSize( removedEqs[level], &sz ); CHKERRQ(ierr);
            ierr = PetscMalloc( bs*sz*sizeof(PetscScalar), &zeros ); CHKERRQ(ierr);
            for(ii=0;ii<bs*sz;ii++) zeros[ii] = 0.;
            ierr = PetscMalloc( bs*sz*sizeof(PetscInt), &idx_bs ); CHKERRQ(ierr);
            ierr = ISGetIndices( removedEqs[level], &idx); CHKERRQ(ierr);
            for(ii=0;ii<sz;ii++) {
              for(jj=0;jj<bs;jj++) {
                idx_bs[ii*bs+jj] = bs*idx[ii]+jj; /* expand each removed node to all its bs equations */
              }
            }
            ierr = ISRestoreIndices( removedEqs[level], &idx ); CHKERRQ(ierr);
            if( sz > 0 ) {
              ierr = VecSetValues( bb, bs*sz, idx_bs, zeros, INSERT_VALUES ); CHKERRQ(ierr);
            }
            ierr = PetscFree( idx_bs ); CHKERRQ(ierr);
            ierr = PetscFree( zeros ); CHKERRQ(ierr);
            ierr = VecAssemblyBegin(bb); CHKERRQ(ierr);
            ierr = VecAssemblyEnd(bb); CHKERRQ(ierr);
          }
          ierr = KSPCreate( wcomm, &eksp );CHKERRQ(ierr);
          ierr = KSPSetTolerances( eksp, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT, 10 );
          CHKERRQ(ierr);
          ierr = KSPSetNormType( eksp, KSP_NORM_NONE ); CHKERRQ(ierr);
          ierr = KSPSetOptionsPrefix(eksp,((PetscObject)pc)->prefix);CHKERRQ(ierr);
          ierr = KSPAppendOptionsPrefix( eksp, "gamg_est_"); CHKERRQ(ierr);
          ierr = KSPSetFromOptions( eksp ); CHKERRQ(ierr);

          ierr = KSPSetInitialGuessNonzero( eksp, PETSC_FALSE ); CHKERRQ(ierr);
          ierr = KSPSetOperators( eksp, Lmat, Lmat, SAME_NONZERO_PATTERN ); CHKERRQ( ierr );
          ierr = KSPSetComputeSingularValues( eksp,PETSC_TRUE ); CHKERRQ(ierr);

          /* set PC type to be same as smoother */
          ierr = KSPSetPC( eksp, subpc ); CHKERRQ( ierr );

          /* solve - keep stuff out of logging */
          ierr = PetscLogEventDeactivate(KSP_Solve);CHKERRQ(ierr);
          ierr = PetscLogEventDeactivate(PC_Apply);CHKERRQ(ierr);
          ierr = KSPSolve( eksp, bb, xx ); CHKERRQ(ierr);
          ierr = PetscLogEventActivate(KSP_Solve);CHKERRQ(ierr);
          ierr = PetscLogEventActivate(PC_Apply);CHKERRQ(ierr);

          ierr = KSPComputeExtremeSingularValues( eksp, &emax, &emin ); CHKERRQ(ierr);

          ierr = VecDestroy( &xx ); CHKERRQ(ierr);
          ierr = VecDestroy( &bb ); CHKERRQ(ierr);
          ierr = KSPDestroy( &eksp ); CHKERRQ(ierr);

          if( pc_gamg->verbose > 0 ) {
            PetscInt N1, tt;
            ierr = MatGetSize( Aarr[level], &N1, &tt ); CHKERRQ(ierr);
            PetscPrintf(wcomm,"\t\t\t%s PC setup max eigen=%e min=%e on level %d (N=%d)\n",__FUNCT__,emax,emin,lidx,N1);
          }
        }
        {
          PetscInt N1, N0;
          ierr = MatGetSize( Aarr[level], &N1, PETSC_NULL ); CHKERRQ(ierr);
          ierr = MatGetSize( Aarr[level+1], &N0, PETSC_NULL ); CHKERRQ(ierr);
          /* heuristic - is this crap? */
          emin = 1.*emax/((PetscReal)N1/(PetscReal)N0);
          emax *= 1.05;
        }
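        /*
           Worked example of the heuristic above (illustrative numbers): if
           the estimated emax is 2.0 and the coarsening ratio N1/N0 is 8,
           then emin = 2.0/8 = 0.25 and emax is inflated to 2.0*1.05 = 2.1,
           so the Chebyshev smoother targets the interval [0.25, 2.1].
        */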
        ierr = KSPChebyshevSetEigenvalues( smoother, emax, emin );CHKERRQ(ierr);
      } /* setup cheby flag */

      if( removedEqs[level] ) {
        ierr = ISDestroy( &removedEqs[level] ); CHKERRQ(ierr);
      }
    } /* non-coarse levels */

    /* clean up */
    for(level=1;level<pc_gamg->Nlevels;level++){
      ierr = MatDestroy( &Parr[level] ); CHKERRQ(ierr);
      ierr = MatDestroy( &Aarr[level] ); CHKERRQ(ierr);
    }

    ierr = PCSetUp_MG( pc );CHKERRQ( ierr );

    if( PETSC_FALSE ){
      KSP smoother; /* PCSetUp_MG seems to insist on setting this to GMRES on coarse grid */
      ierr = PCMGGetSmoother( pc, 0, &smoother ); CHKERRQ(ierr);
      ierr = KSPSetType( smoother, KSPPREONLY ); CHKERRQ(ierr);
    }
  }
  else {
    KSP smoother;
    if (pc_gamg->verbose) PetscPrintf(wcomm,"\t[%d]%s one level solver used (system is seen as DD). Using default solver.\n",mype,__FUNCT__);
    ierr = PCMGGetSmoother( pc, 0, &smoother ); CHKERRQ(ierr);
    ierr = KSPSetOperators( smoother, Aarr[0], Aarr[0], SAME_NONZERO_PATTERN ); CHKERRQ(ierr);
    ierr = KSPSetType( smoother, KSPPREONLY ); CHKERRQ(ierr);
    ierr = PCSetUp_MG( pc );CHKERRQ( ierr );
  }

  PetscFunctionReturn(0);
}

/* ------------------------------------------------------------------------- */
/*
   PCDestroy_GAMG - Destroys the private context for the GAMG preconditioner
   that was created with PCCreate_GAMG().

   Input Parameter:
.  pc - the preconditioner context

   Application Interface Routine: PCDestroy()
*/
#undef __FUNCT__
#define __FUNCT__ "PCDestroy_GAMG"
PetscErrorCode PCDestroy_GAMG( PC pc )
{
  PetscErrorCode ierr;
  PC_MG          *mg = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg= (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  ierr = PCReset_GAMG( pc );CHKERRQ(ierr);
  ierr = PetscFree( pc_gamg );CHKERRQ(ierr);
  ierr = PCDestroy_MG( pc );CHKERRQ(ierr);
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetProcEqLim"
/*@
   PCGAMGSetProcEqLim - Set number of equations to aim for on coarse grids via
   processor reduction.

   Not Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - the number of equations to aim for per process

   Options Database Key:
.  -pc_gamg_process_eq_limit

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetProcEqLim(PC pc, PetscInt n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetProcEqLim_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetProcEqLim_GAMG"
PetscErrorCode PCGAMGSetProcEqLim_GAMG(PC pc, PetscInt n)
{
  PC_MG   *mg = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  if(n>0) pc_gamg->min_eq_proc = n;
  PetscFunctionReturn(0);
}
EXTERN_C_END
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetCoarseEqLim"
/*@
   PCGAMGSetCoarseEqLim - Set max number of equations on coarse grids.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - the maximum number of equations

   Options Database Key:
.  -pc_gamg_coarse_eq_limit

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetCoarseEqLim(PC pc, PetscInt n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetCoarseEqLim_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetCoarseEqLim_GAMG"
PetscErrorCode PCGAMGSetCoarseEqLim_GAMG(PC pc, PetscInt n)
{
  PC_MG   *mg = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  if(n>0) pc_gamg->coarse_eq_limit = n;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetRepartitioning"
/*@
   PCGAMGSetRepartitioning - Repartition the coarse grids

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - PETSC_TRUE to repartition the coarse grids

   Options Database Key:
.  -pc_gamg_repartition

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetRepartitioning(PC pc, PetscBool n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetRepartitioning_C",(PC,PetscBool),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetRepartitioning_GAMG"
PetscErrorCode PCGAMGSetRepartitioning_GAMG(PC pc, PetscBool n)
{
  PC_MG   *mg = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->repart = n;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetUseASMAggs"
/*@
   PCGAMGSetUseASMAggs - Use the aggregates to define the subdomains of the
   GASM smoother.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - PETSC_TRUE to use the aggregates

   Options Database Key:
.  -pc_gamg_use_agg_gasm

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetUseASMAggs(PC pc, PetscBool n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetUseASMAggs_C",(PC,PetscBool),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetUseASMAggs_GAMG"
PetscErrorCode PCGAMGSetUseASMAggs_GAMG(PC pc, PetscBool n)
{
  PC_MG   *mg = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->use_aggs_in_gasm = n;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetNlevels"
/*@
   PCGAMGSetNlevels - Set the maximum number of multigrid levels.

   Not collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - the maximum number of levels

   Options Database Key:
.  -pc_mg_levels

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetNlevels(PC pc, PetscInt n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetNlevels_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetNlevels_GAMG"
PetscErrorCode PCGAMGSetNlevels_GAMG(PC pc, PetscInt n)
{
  PC_MG   *mg = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->Nlevels = n;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetThreshold"
/*@
   PCGAMGSetThreshold - Relative threshold to use for dropping edges in the
   aggregation graph

   Not collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - the threshold value

   Options Database Key:
.  -pc_gamg_threshold

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetThreshold(PC pc, PetscReal n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetThreshold_C",(PC,PetscReal),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetThreshold_GAMG"
PetscErrorCode PCGAMGSetThreshold_GAMG(PC pc, PetscReal n)
{
  PC_MG   *mg = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->threshold = n;
  PetscFunctionReturn(0);
}
EXTERN_C_END
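/*
   Example (sketch): programmatic configuration using the setters above.
   Illustrative only -- 'ksp' is assumed to be an existing KSP, the numeric
   values are arbitrary, and error checking is omitted:

     PC pc;
     KSPGetPC( ksp, &pc );
     PCSetType( pc, PCGAMG );
     PCGAMGSetType( pc, GAMGAGG );      -- aggregation method
     PCGAMGSetThreshold( pc, 0.05 );    -- graph edge-dropping tolerance
     PCGAMGSetProcEqLim( pc, 1000 );    -- eq/proc goal for reduction
     PCGAMGSetCoarseEqLim( pc, 500 );   -- max eqs on the coarsest grid
     PCGAMGSetNlevels( pc, 4 );
*/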
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetType"
/*@
   PCGAMGSetType - Set solution method - calls sub create method

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  type - the GAMG type (e.g. GAMGAGG or GAMGGEO)

   Options Database Key:
.  -pc_gamg_type

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetType( PC pc, const PCGAMGType type )
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetType_C",(PC,const PCGAMGType),(pc,type));
  CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetType_GAMG"
PetscErrorCode PCGAMGSetType_GAMG( PC pc, const PCGAMGType type )
{
  PetscErrorCode ierr,(*r)(PC);

  PetscFunctionBegin;
  ierr = PetscFListFind(GAMGList,((PetscObject)pc)->comm,type,PETSC_FALSE,(PetscVoidStarFunction)&r);
  CHKERRQ(ierr);

  if (!r) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_UNKNOWN_TYPE,"Unknown GAMG type %s given",type);

  /* call sub create method */
  ierr = (*r)(pc); CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCSetFromOptions_GAMG"
PetscErrorCode PCSetFromOptions_GAMG( PC pc )
{
  PetscErrorCode ierr;
  PC_MG          *mg = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
  PetscBool      flag;
  MPI_Comm       wcomm = ((PetscObject)pc)->comm;

  PetscFunctionBegin;
  ierr = PetscOptionsHead("GAMG options"); CHKERRQ(ierr);
  {
    /* -pc_gamg_verbose */
    ierr = PetscOptionsInt("-pc_gamg_verbose","Verbose (debugging) output for PCGAMG",
                           "none", pc_gamg->verbose,
                           &pc_gamg->verbose, PETSC_NULL );
    CHKERRQ(ierr);

    /* -pc_gamg_repartition */
    ierr = PetscOptionsBool("-pc_gamg_repartition",
                            "Repartition coarse grids (false)",
                            "PCGAMGRepartitioning",
                            pc_gamg->repart,
                            &pc_gamg->repart,
                            &flag);
    CHKERRQ(ierr);

    /* -pc_gamg_use_agg_gasm */
    ierr = PetscOptionsBool("-pc_gamg_use_agg_gasm",
                            "Use aggregation aggregates for GASM smoother (false)",
                            "PCGAMGUseASMAggs",
                            pc_gamg->use_aggs_in_gasm,
                            &pc_gamg->use_aggs_in_gasm,
                            &flag);
    CHKERRQ(ierr);

    /* -pc_gamg_process_eq_limit */
    ierr = PetscOptionsInt("-pc_gamg_process_eq_limit",
                           "Limit (goal) on number of equations per process on coarse grids",
                           "PCGAMGSetProcEqLim",
                           pc_gamg->min_eq_proc,
                           &pc_gamg->min_eq_proc,
                           &flag );
    CHKERRQ(ierr);

    /* -pc_gamg_coarse_eq_limit */
    ierr = PetscOptionsInt("-pc_gamg_coarse_eq_limit",
                           "Limit on number of equations for the coarse grid",
                           "PCGAMGSetCoarseEqLim",
                           pc_gamg->coarse_eq_limit,
                           &pc_gamg->coarse_eq_limit,
                           &flag );
    CHKERRQ(ierr);

    /* -pc_gamg_threshold */
    ierr = PetscOptionsReal("-pc_gamg_threshold",
                            "Relative threshold to use for dropping edges in aggregation graph",
                            "PCGAMGSetThreshold",
                            pc_gamg->threshold,
                            &pc_gamg->threshold,
                            &flag );
    CHKERRQ(ierr);
    if(flag && pc_gamg->verbose) PetscPrintf(wcomm,"\t[%d]%s threshold set %e\n",0,__FUNCT__,pc_gamg->threshold);

    /* -pc_mg_levels */
    ierr = PetscOptionsInt("-pc_mg_levels",
                           "Set number of MG levels",
                           "PCGAMGSetNlevels",
                           pc_gamg->Nlevels,
                           &pc_gamg->Nlevels,
                           &flag );
    CHKERRQ(ierr);
  }
  ierr = PetscOptionsTail();CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
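/*
   Example (sketch): the equivalent options-database configuration to the
   setters; values are illustrative and the type name is whatever the
   GAMGAGG/GAMGGEO strings in gamg.h expand to:

     -pc_type gamg -pc_gamg_type agg
     -pc_gamg_threshold 0.05 -pc_gamg_process_eq_limit 1000
     -pc_gamg_coarse_eq_limit 500 -pc_mg_levels 4
     -pc_gamg_repartition -pc_gamg_verbose 1
*/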
/* -------------------------------------------------------------------------- */
/*MC
   PCGAMG - Geometric-algebraic multigrid (AMG) preconditioning framework.
   This is the entry point to GAMG, registered in pcregis.c

   Options Database Keys:
   Multigrid options (inherited):
+  -pc_mg_cycles <1>: 1 for V cycle, 2 for W-cycle (PCMGSetCycleType)
.  -pc_mg_smoothup <1>: Number of post-smoothing steps (PCMGSetNumberSmoothUp)
.  -pc_mg_smoothdown <1>: Number of pre-smoothing steps (PCMGSetNumberSmoothDown)
-  -pc_mg_type <multiplicative>: (one of) additive multiplicative full cascade kascade

   Level: intermediate

   Concepts: multigrid

.seealso: PCCreate(), PCSetType(), PCType (for list of available types), PC, PCMGType,
          PCMGSetLevels(), PCMGGetLevels(), PCMGSetType(), PCMGSetCycleType(), PCMGSetNumberSmoothDown(),
          PCMGSetNumberSmoothUp(), PCMGGetCoarseSolve(), PCMGSetResidual(), PCMGSetInterpolation(),
          PCMGSetRestriction(), PCMGGetSmoother(), PCMGGetSmootherUp(), PCMGGetSmootherDown(),
          PCMGSetCyclesOnLevel(), PCMGSetRhs(), PCMGSetX(), PCMGSetR()
M*/
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCCreate_GAMG"
PetscErrorCode PCCreate_GAMG( PC pc )
{
  PetscErrorCode ierr;
  PC_GAMG        *pc_gamg;
  PC_MG          *mg;
#if defined PETSC_GAMG_USE_LOG
  static long count = 0;
#endif

  PetscFunctionBegin;
  /* PCGAMG is an inherited class of PCMG.  Initialize pc as PCMG */
  ierr = PCSetType( pc, PCMG ); CHKERRQ(ierr); /* calls PCCreate_MG() and MGCreate_Private() */
  ierr = PetscObjectChangeTypeName( (PetscObject)pc, PCGAMG ); CHKERRQ(ierr);

  /* create a supporting struct and attach it to pc */
  ierr = PetscNewLog( pc, PC_GAMG, &pc_gamg ); CHKERRQ(ierr);
  mg = (PC_MG*)pc->data;
  mg->galerkin = 2;             /* Use Galerkin, but it is computed externally */
  mg->innerctx = pc_gamg;

  pc_gamg->setup_count = 0;
  /* these should be in subctx but repartitioning needs simple arrays */
  pc_gamg->data_sz = 0;
  pc_gamg->data = 0;

  /* register AMG type */
  if( !GAMGList ){
    ierr = PetscFListAdd(&GAMGList,GAMGGEO,"PCCreateGAMG_GEO",(void(*)(void))PCCreateGAMG_GEO);CHKERRQ(ierr);
    ierr = PetscFListAdd(&GAMGList,GAMGAGG,"PCCreateGAMG_AGG",(void(*)(void))PCCreateGAMG_AGG);CHKERRQ(ierr);
  }

  /* overwrite the pointers of PCMG by the functions of base class PCGAMG */
  pc->ops->setfromoptions = PCSetFromOptions_GAMG;
  pc->ops->setup          = PCSetUp_GAMG;
  pc->ops->reset          = PCReset_GAMG;
  pc->ops->destroy        = PCDestroy_GAMG;

  ierr = PetscObjectComposeFunctionDynamic( (PetscObject)pc,
                                            "PCGAMGSetProcEqLim_C",
                                            "PCGAMGSetProcEqLim_GAMG",
                                            PCGAMGSetProcEqLim_GAMG);
  CHKERRQ(ierr);

  ierr = PetscObjectComposeFunctionDynamic( (PetscObject)pc,
                                            "PCGAMGSetCoarseEqLim_C",
                                            "PCGAMGSetCoarseEqLim_GAMG",
                                            PCGAMGSetCoarseEqLim_GAMG);
  CHKERRQ(ierr);

  ierr = PetscObjectComposeFunctionDynamic( (PetscObject)pc,
                                            "PCGAMGSetRepartitioning_C",
                                            "PCGAMGSetRepartitioning_GAMG",
                                            PCGAMGSetRepartitioning_GAMG);
  CHKERRQ(ierr);

  ierr = PetscObjectComposeFunctionDynamic( (PetscObject)pc,
                                            "PCGAMGSetUseASMAggs_C",
                                            "PCGAMGSetUseASMAggs_GAMG",
                                            PCGAMGSetUseASMAggs_GAMG);
  CHKERRQ(ierr);

  ierr = PetscObjectComposeFunctionDynamic( (PetscObject)pc,
                                            "PCGAMGSetThreshold_C",
                                            "PCGAMGSetThreshold_GAMG",
                                            PCGAMGSetThreshold_GAMG);
  CHKERRQ(ierr);

  ierr = PetscObjectComposeFunctionDynamic( (PetscObject)pc,
                                            "PCGAMGSetType_C",
"PCGAMGSetType_GAMG", 1519 PCGAMGSetType_GAMG); 1520 CHKERRQ(ierr); 1521 1522 pc_gamg->repart = PETSC_FALSE; 1523 pc_gamg->use_aggs_in_gasm = PETSC_FALSE; 1524 pc_gamg->min_eq_proc = 100; 1525 pc_gamg->coarse_eq_limit = 800; 1526 pc_gamg->threshold = 0.001; 1527 pc_gamg->Nlevels = GAMG_MAXLEVELS; 1528 pc_gamg->verbose = 0; 1529 pc_gamg->emax_id = -1; 1530 1531 /* private events */ 1532 #if defined PETSC_GAMG_USE_LOG 1533 if( count++ == 0 ) { 1534 PetscLogEventRegister("GAMG: createProl", PC_CLASSID, &petsc_gamg_setup_events[SET1]); 1535 PetscLogEventRegister(" Graph", PC_CLASSID, &petsc_gamg_setup_events[GRAPH]); 1536 /* PetscLogEventRegister(" G.Mat", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_MAT]); */ 1537 /* PetscLogEventRegister(" G.Filter", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_FILTER]); */ 1538 /* PetscLogEventRegister(" G.Square", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_SQR]); */ 1539 PetscLogEventRegister(" MIS/Agg", PC_CLASSID, &petsc_gamg_setup_events[SET4]); 1540 PetscLogEventRegister(" geo: growSupp", PC_CLASSID, &petsc_gamg_setup_events[SET5]); 1541 PetscLogEventRegister(" geo: triangle", PC_CLASSID, &petsc_gamg_setup_events[SET6]); 1542 PetscLogEventRegister(" search&set", PC_CLASSID, &petsc_gamg_setup_events[FIND_V]); 1543 PetscLogEventRegister(" SA: col data", PC_CLASSID, &petsc_gamg_setup_events[SET7]); 1544 PetscLogEventRegister(" SA: frmProl0", PC_CLASSID, &petsc_gamg_setup_events[SET8]); 1545 PetscLogEventRegister(" SA: smooth", PC_CLASSID, &petsc_gamg_setup_events[SET9]); 1546 PetscLogEventRegister("GAMG: partLevel", PC_CLASSID, &petsc_gamg_setup_events[SET2]); 1547 PetscLogEventRegister(" repartition", PC_CLASSID, &petsc_gamg_setup_events[SET12]); 1548 PetscLogEventRegister(" Invert-Sort", PC_CLASSID, &petsc_gamg_setup_events[SET13]); 1549 PetscLogEventRegister(" Move A", PC_CLASSID, &petsc_gamg_setup_events[SET14]); 1550 PetscLogEventRegister(" Move P", PC_CLASSID, &petsc_gamg_setup_events[SET15]); 1551 1552 /* PetscLogEventRegister(" PL move data", PC_CLASSID, &petsc_gamg_setup_events[SET13]); */ 1553 /* PetscLogEventRegister("GAMG: fix", PC_CLASSID, &petsc_gamg_setup_events[SET10]); */ 1554 /* PetscLogEventRegister("GAMG: set levels", PC_CLASSID, &petsc_gamg_setup_events[SET11]); */ 1555 /* create timer stages */ 1556 #if defined GAMG_STAGES 1557 { 1558 char str[32]; 1559 sprintf(str,"MG Level %d (finest)",0); 1560 PetscLogStageRegister(str, &gamg_stages[0]); 1561 PetscInt lidx; 1562 for (lidx=1;lidx<9;lidx++){ 1563 sprintf(str,"MG Level %d",lidx); 1564 PetscLogStageRegister(str, &gamg_stages[lidx]); 1565 } 1566 } 1567 #endif 1568 } 1569 #endif 1570 /* general events */ 1571 #if defined PETSC_USE_LOG 1572 PetscLogEventRegister("PCGAMGgraph_AGG", 0, &PC_GAMGGgraph_AGG); 1573 PetscLogEventRegister("PCGAMGgraph_GEO", PC_CLASSID, &PC_GAMGGgraph_GEO); 1574 PetscLogEventRegister("PCGAMGcoarse_AGG", PC_CLASSID, &PC_GAMGCoarsen_AGG); 1575 PetscLogEventRegister("PCGAMGcoarse_GEO", PC_CLASSID, &PC_GAMGCoarsen_GEO); 1576 PetscLogEventRegister("PCGAMGProl_AGG", PC_CLASSID, &PC_GAMGProlongator_AGG); 1577 PetscLogEventRegister("PCGAMGProl_GEO", PC_CLASSID, &PC_GAMGProlongator_GEO); 1578 PetscLogEventRegister("PCGAMGPOpt_AGG", PC_CLASSID, &PC_GAMGOptprol_AGG); 1579 PetscLogEventRegister("GAMGKKTProl_AGG", PC_CLASSID, &PC_GAMGKKTProl_AGG); 1580 #endif 1581 1582 /* instantiate derived type */ 1583 ierr = PetscOptionsHead("GAMG options"); CHKERRQ(ierr); 1584 { 1585 char tname[256] = GAMGAGG; 1586 ierr = PetscOptionsList("-pc_gamg_type","Type of GAMG method","PCGAMGSetType", 
                            GAMGList, tname, tname, sizeof(tname), PETSC_NULL );
    CHKERRQ(ierr);
    ierr = PCGAMGSetType( pc, tname ); CHKERRQ(ierr);
  }
  ierr = PetscOptionsTail(); CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
EXTERN_C_END