1 /* 2 GAMG geometric-algebric multigrid PC - Mark Adams 2011 3 */ 4 #include "petsc-private/matimpl.h" 5 #include <../src/ksp/pc/impls/gamg/gamg.h> /*I "petscpc.h" I*/ 6 #include <petsc-private/kspimpl.h> 7 8 #if defined PETSC_GAMG_USE_LOG 9 PetscLogEvent petsc_gamg_setup_events[NUM_SET]; 10 #endif 11 12 #if defined PETSC_USE_LOG 13 PetscLogEvent PC_GAMGGgraph_AGG; 14 PetscLogEvent PC_GAMGGgraph_GEO; 15 PetscLogEvent PC_GAMGCoarsen_AGG; 16 PetscLogEvent PC_GAMGCoarsen_GEO; 17 PetscLogEvent PC_GAMGProlongator_AGG; 18 PetscLogEvent PC_GAMGProlongator_GEO; 19 PetscLogEvent PC_GAMGOptprol_AGG; 20 PetscLogEvent PC_GAMGKKTProl_AGG; 21 #endif 22 23 #define GAMG_MAXLEVELS 30 24 25 /* #define GAMG_STAGES */ 26 #if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES) 27 static PetscLogStage gamg_stages[GAMG_MAXLEVELS]; 28 #endif 29 30 static PetscFList GAMGList = 0; 31 32 /* ----------------------------------------------------------------------------- */ 33 #undef __FUNCT__ 34 #define __FUNCT__ "PCReset_GAMG" 35 PetscErrorCode PCReset_GAMG(PC pc) 36 { 37 PetscErrorCode ierr; 38 PC_MG *mg = (PC_MG*)pc->data; 39 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 40 41 PetscFunctionBegin; 42 if ( pc_gamg->data ) { /* this should not happen, cleaned up in SetUp */ 43 PetscPrintf(((PetscObject)pc)->comm,"***[%d]%s this should not happen, cleaned up in SetUp\n",0,__FUNCT__); 44 ierr = PetscFree( pc_gamg->data ); CHKERRQ(ierr); 45 } 46 pc_gamg->data = PETSC_NULL; pc_gamg->data_sz = 0; 47 48 if ( pc_gamg->orig_data ) { 49 ierr = PetscFree( pc_gamg->orig_data ); CHKERRQ(ierr); 50 } 51 52 PetscFunctionReturn(0); 53 } 54 55 /* private 2x2 Mat Nest for Stokes */ 56 typedef struct{ 57 Mat A11,A21,A12,Amat; 58 IS prim_is,constr_is; 59 }GAMGKKTMat; 60 61 #undef __FUNCT__ 62 #define __FUNCT__ "GAMGKKTMatCreate" 63 static PetscErrorCode GAMGKKTMatCreate( Mat A, PetscBool iskkt, GAMGKKTMat *out ) 64 { 65 PetscFunctionBegin; 66 out->Amat = A; 67 if ( iskkt ){ 68 PetscErrorCode ierr; 69 IS is_constraint, is_prime; 70 PetscInt nmin,nmax; 71 72 ierr = MatGetOwnershipRange( A, &nmin, &nmax ); CHKERRQ(ierr); 73 ierr = MatFindZeroDiagonals( A, &is_constraint ); CHKERRQ(ierr); 74 ierr = ISComplement( is_constraint, nmin, nmax, &is_prime ); CHKERRQ(ierr); 75 out->prim_is = is_prime; 76 out->constr_is = is_constraint; 77 78 ierr = MatGetSubMatrix( A, is_prime, is_prime, MAT_INITIAL_MATRIX, &out->A11); CHKERRQ(ierr); 79 ierr = MatGetSubMatrix( A, is_prime, is_constraint, MAT_INITIAL_MATRIX, &out->A12); CHKERRQ(ierr); 80 ierr = MatGetSubMatrix( A, is_constraint, is_prime, MAT_INITIAL_MATRIX, &out->A21); CHKERRQ(ierr); 81 } 82 else { 83 out->A11 = A; 84 out->A21 = PETSC_NULL; 85 out->A12 = PETSC_NULL; 86 out->prim_is = PETSC_NULL; 87 out->constr_is = PETSC_NULL; 88 } 89 PetscFunctionReturn(0); 90 } 91 92 #undef __FUNCT__ 93 #define __FUNCT__ "GAMGKKTMatDestroy" 94 static PetscErrorCode GAMGKKTMatDestroy( GAMGKKTMat *mat ) 95 { 96 PetscErrorCode ierr; 97 98 PetscFunctionBegin; 99 if ( mat->A11 && mat->A11 != mat->Amat ) { 100 ierr = MatDestroy( &mat->A11 ); CHKERRQ(ierr); 101 } 102 ierr = MatDestroy( &mat->A21 ); CHKERRQ(ierr); 103 ierr = MatDestroy( &mat->A12 ); CHKERRQ(ierr); 104 105 ierr = ISDestroy( &mat->prim_is ); CHKERRQ(ierr); 106 ierr = ISDestroy( &mat->constr_is ); CHKERRQ(ierr); 107 108 PetscFunctionReturn(0); 109 } 110 111 /* -------------------------------------------------------------------------- */ 112 /* 113 createLevel: create coarse op with RAP. repartition and/or reduce number 114 of active processors. 
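   The coarse operator is formed with the Galerkin product A_coarse = P^T A_fine P
   (MatPtAP below).  The number of processes kept active on the coarse grid is chosen
   roughly as

      new_npe = neq_coarse_global / min_eq_proc   (rounded to the nearest integer)

   then forced to 1 if the coarse problem has fewer than 'coarse_eq_limit' equations or
   if this is the last level, and capped at the current number of active processes.
   The coarse matrix, the prolongator columns and the node data (coordinates etc.) are
   then moved onto those processes, either by a graph partitioning of the coarse matrix
   (-pc_gamg_repartition) or by simple aggregation of consecutive ranks.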
115 116 Input Parameter: 117 . pc - parameters + side effect: coarse data in 'pc_gamg->data' and 118 'pc_gamg->data_sz' are changed via repartitioning/reduction. 119 . Amat_fine - matrix on this fine (k) level 120 . cr_bs - coarse block size 121 . isLast - 122 . stokes - 123 In/Output Parameter: 124 . a_P_inout - prolongation operator to the next level (k-->k-1) 125 . a_nactive_proc - number of active procs 126 Output Parameter: 127 . a_Amat_crs - coarse matrix that is created (k-1) 128 */ 129 130 #undef __FUNCT__ 131 #define __FUNCT__ "createLevel" 132 static PetscErrorCode createLevel( const PC pc, 133 const Mat Amat_fine, 134 const PetscInt cr_bs, 135 const PetscBool isLast, 136 const PetscBool stokes, 137 Mat *a_P_inout, 138 Mat *a_Amat_crs, 139 PetscMPIInt *a_nactive_proc 140 ) 141 { 142 PetscErrorCode ierr; 143 PC_MG *mg = (PC_MG*)pc->data; 144 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 145 const PetscBool repart = pc_gamg->repart; 146 const PetscInt min_eq_proc = pc_gamg->min_eq_proc, coarse_max = pc_gamg->coarse_eq_limit; 147 Mat Cmat,Pold=*a_P_inout; 148 MPI_Comm wcomm = ((PetscObject)Amat_fine)->comm; 149 PetscMPIInt mype,npe,new_npe,nactive=*a_nactive_proc; 150 PetscInt ncrs_eq,ncrs_prim,f_bs; 151 152 PetscFunctionBegin; 153 ierr = MPI_Comm_rank( wcomm, &mype ); CHKERRQ(ierr); 154 ierr = MPI_Comm_size( wcomm, &npe ); CHKERRQ(ierr); 155 ierr = MatGetBlockSize( Amat_fine, &f_bs ); CHKERRQ(ierr); 156 /* RAP */ 157 ierr = MatPtAP( Amat_fine, Pold, MAT_INITIAL_MATRIX, 2.0, &Cmat ); CHKERRQ(ierr); 158 159 /* set 'ncrs_prim' (nodes), 'ncrs_eq' (equations)*/ 160 ncrs_prim = pc_gamg->data_sz/pc_gamg->data_cell_cols/pc_gamg->data_cell_rows; 161 assert(pc_gamg->data_sz%(pc_gamg->data_cell_cols*pc_gamg->data_cell_rows)==0); 162 ierr = MatGetLocalSize( Cmat, &ncrs_eq, PETSC_NULL ); CHKERRQ(ierr); 163 164 /* get number of PEs to make active 'new_npe', reduce, can be any integer 1-P */ 165 { 166 PetscInt ncrs_eq_glob; 167 ierr = MatGetSize( Cmat, &ncrs_eq_glob, PETSC_NULL ); CHKERRQ(ierr); 168 new_npe = (PetscMPIInt)((float)ncrs_eq_glob/(float)min_eq_proc + 0.5); /* hardwire min. 
number of eq/proc */ 169 if ( new_npe == 0 || ncrs_eq_glob < coarse_max ) new_npe = 1; 170 else if ( new_npe >= nactive ) new_npe = nactive; /* no change, rare */ 171 if ( isLast ) new_npe = 1; 172 } 173 174 if ( !repart && new_npe==nactive ) { 175 *a_Amat_crs = Cmat; /* output - no repartitioning or reduction - could bail here */ 176 } 177 else { 178 const PetscInt *idx,ndata_rows=pc_gamg->data_cell_rows,ndata_cols=pc_gamg->data_cell_cols,node_data_sz=ndata_rows*ndata_cols; 179 PetscInt *counts,*newproc_idx,ii,jj,kk,strideNew,*tidx,ncrs_prim_new,ncrs_eq_new,nloc_old; 180 IS is_eq_newproc,is_eq_newproc_prim,is_eq_num,is_eq_num_prim,isscat,new_eq_indices; 181 VecScatter vecscat; 182 PetscScalar *array; 183 Vec src_crd, dest_crd; 184 185 nloc_old = ncrs_eq/cr_bs; assert(ncrs_eq%cr_bs==0); 186 #if defined PETSC_GAMG_USE_LOG 187 ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET12],0,0,0,0);CHKERRQ(ierr); 188 #endif 189 /* make 'is_eq_newproc' */ 190 ierr = PetscMalloc( npe*sizeof(PetscInt), &counts ); CHKERRQ(ierr); 191 if ( repart && !stokes ) { 192 /* Repartition Cmat_{k} and move colums of P^{k}_{k-1} and coordinates of primal part accordingly */ 193 Mat adj; 194 195 if (pc_gamg->verbose>0) { 196 if (pc_gamg->verbose==1) PetscPrintf(wcomm,"\t[%d]%s repartition: npe (active): %d --> %d, neq = %d\n",mype,__FUNCT__,*a_nactive_proc,new_npe,ncrs_eq); 197 else { 198 PetscInt n; 199 ierr = MPI_Allreduce( &ncrs_eq, &n, 1, MPIU_INT, MPI_SUM, wcomm );CHKERRQ(ierr); 200 PetscPrintf(wcomm,"\t[%d]%s repartition: npe (active): %d --> %d, neq = %d\n",mype,__FUNCT__,*a_nactive_proc,new_npe,n); 201 } 202 } 203 204 /* get 'adj' */ 205 if ( cr_bs == 1 ) { 206 ierr = MatConvert( Cmat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj ); CHKERRQ(ierr); 207 } 208 else{ 209 /* make a scalar matrix to partition (no Stokes here) */ 210 Mat tMat; 211 PetscInt Istart_crs,Iend_crs,ncols,jj,Ii; 212 const PetscScalar *vals; 213 const PetscInt *idx; 214 PetscInt *d_nnz, *o_nnz, M, N; 215 static PetscInt llev = 0; 216 217 ierr = PetscMalloc( ncrs_prim*sizeof(PetscInt), &d_nnz ); CHKERRQ(ierr); 218 ierr = PetscMalloc( ncrs_prim*sizeof(PetscInt), &o_nnz ); CHKERRQ(ierr); 219 ierr = MatGetOwnershipRange( Cmat, &Istart_crs, &Iend_crs ); CHKERRQ(ierr); 220 ierr = MatGetSize( Cmat, &M, &N );CHKERRQ(ierr); 221 for ( Ii = Istart_crs, jj = 0 ; Ii < Iend_crs ; Ii += cr_bs, jj++ ) { 222 ierr = MatGetRow(Cmat,Ii,&ncols,0,0); CHKERRQ(ierr); 223 d_nnz[jj] = ncols/cr_bs; 224 o_nnz[jj] = ncols/cr_bs; 225 ierr = MatRestoreRow(Cmat,Ii,&ncols,0,0); CHKERRQ(ierr); 226 if ( d_nnz[jj] > ncrs_prim ) d_nnz[jj] = ncrs_prim; 227 if ( o_nnz[jj] > (M/cr_bs-ncrs_prim) ) o_nnz[jj] = M/cr_bs-ncrs_prim; 228 } 229 230 ierr = MatCreate( wcomm, &tMat ); CHKERRQ(ierr); 231 ierr = MatSetSizes( tMat, ncrs_prim, ncrs_prim, 232 PETSC_DETERMINE, PETSC_DETERMINE ); 233 CHKERRQ(ierr); 234 ierr = MatSetType(tMat,MATAIJ); CHKERRQ(ierr); 235 ierr = MatSeqAIJSetPreallocation(tMat,0,d_nnz);CHKERRQ(ierr); 236 ierr = MatMPIAIJSetPreallocation(tMat,0,d_nnz,0,o_nnz);CHKERRQ(ierr); 237 ierr = PetscFree( d_nnz ); CHKERRQ(ierr); 238 ierr = PetscFree( o_nnz ); CHKERRQ(ierr); 239 240 for ( ii = Istart_crs; ii < Iend_crs; ii++ ) { 241 PetscInt dest_row = ii/cr_bs; 242 ierr = MatGetRow(Cmat,ii,&ncols,&idx,&vals); CHKERRQ(ierr); 243 for ( jj = 0 ; jj < ncols ; jj++ ){ 244 PetscInt dest_col = idx[jj]/cr_bs; 245 PetscScalar v = 1.0; 246 ierr = MatSetValues(tMat,1,&dest_row,1,&dest_col,&v,ADD_VALUES); CHKERRQ(ierr); 247 } 248 ierr = MatRestoreRow(Cmat,ii,&ncols,&idx,&vals); CHKERRQ(ierr); 249 
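        /* each nonzero of this block row added 1.0 to its node pair, so
           tMat(ii/cr_bs, idx[jj]/cr_bs) ends up counting the couplings between the two
           nodes; only this connectivity, not the actual matrix values, is handed to the
           partitioner */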
} 250 ierr = MatAssemblyBegin(tMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 251 ierr = MatAssemblyEnd(tMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 252 253 if ( llev++ == -1 ) { 254 PetscViewer viewer; char fname[32]; 255 ierr = PetscSNPrintf(fname,sizeof(fname),"part_mat_%D.mat",llev);CHKERRQ(ierr); 256 PetscViewerBinaryOpen(wcomm,fname,FILE_MODE_WRITE,&viewer); 257 ierr = MatView( tMat, viewer ); CHKERRQ(ierr); 258 ierr = PetscViewerDestroy( &viewer ); 259 } 260 261 ierr = MatConvert( tMat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj ); CHKERRQ(ierr); 262 263 ierr = MatDestroy( &tMat ); CHKERRQ(ierr); 264 } /* create 'adj' */ 265 266 { /* partition: get newproc_idx */ 267 char prefix[256]; 268 const char *pcpre; 269 const PetscInt *is_idx; 270 MatPartitioning mpart; 271 IS proc_is; 272 PetscInt targetPE; 273 274 ierr = MatPartitioningCreate( wcomm, &mpart ); CHKERRQ(ierr); 275 ierr = MatPartitioningSetAdjacency( mpart, adj ); CHKERRQ(ierr); 276 ierr = PCGetOptionsPrefix( pc, &pcpre );CHKERRQ(ierr); 277 ierr = PetscSNPrintf(prefix,sizeof(prefix),"%spc_gamg_",pcpre?pcpre:"");CHKERRQ(ierr); 278 ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr); 279 ierr = MatPartitioningSetFromOptions( mpart ); CHKERRQ(ierr); 280 ierr = MatPartitioningSetNParts( mpart, new_npe );CHKERRQ(ierr); 281 ierr = MatPartitioningApply( mpart, &proc_is ); CHKERRQ(ierr); 282 ierr = MatPartitioningDestroy( &mpart ); CHKERRQ(ierr); 283 284 /* collect IS info */ 285 ierr = PetscMalloc( ncrs_eq*sizeof(PetscInt), &newproc_idx ); CHKERRQ(ierr); 286 ierr = ISGetIndices( proc_is, &is_idx ); CHKERRQ(ierr); 287 targetPE = 1; /* bring to "front" of machine */ 288 /*targetPE = npe/new_npe;*/ /* spread partitioning across machine */ 289 for ( kk = jj = 0 ; kk < nloc_old ; kk++ ){ 290 for ( ii = 0 ; ii < cr_bs ; ii++, jj++ ){ 291 newproc_idx[jj] = is_idx[kk] * targetPE; /* distribution */ 292 } 293 } 294 ierr = ISRestoreIndices( proc_is, &is_idx ); CHKERRQ(ierr); 295 ierr = ISDestroy( &proc_is ); CHKERRQ(ierr); 296 } 297 ierr = MatDestroy( &adj ); CHKERRQ(ierr); 298 299 ierr = ISCreateGeneral( wcomm, ncrs_eq, newproc_idx, PETSC_COPY_VALUES, &is_eq_newproc ); 300 CHKERRQ(ierr); 301 if ( newproc_idx != 0 ) { 302 ierr = PetscFree( newproc_idx ); CHKERRQ(ierr); 303 } 304 } /* repartitioning */ 305 else { /* simple aggreagtion of parts -- 'is_eq_newproc' */ 306 307 PetscInt rfactor,targetPE; 308 /* find factor */ 309 if ( new_npe == 1 ) rfactor = npe; /* easy */ 310 else { 311 PetscReal best_fact = 0.; 312 jj = -1; 313 for ( kk = 1 ; kk <= npe ; kk++ ){ 314 if ( npe%kk==0 ) { /* a candidate */ 315 PetscReal nactpe = (PetscReal)npe/(PetscReal)kk, fact = nactpe/(PetscReal)new_npe; 316 if (fact > 1.0) fact = 1./fact; /* keep fact < 1 */ 317 if ( fact > best_fact ) { 318 best_fact = fact; jj = kk; 319 } 320 } 321 } 322 if ( jj != -1 ) rfactor = jj; 323 else rfactor = 1; /* does this happen .. 
a prime */ 324 } 325 new_npe = npe/rfactor; 326 327 if ( new_npe==nactive ) { 328 *a_Amat_crs = Cmat; /* output - no repartitioning or reduction, bail out because nested here */ 329 ierr = PetscFree( counts ); CHKERRQ(ierr); 330 if (pc_gamg->verbose>0){ 331 PetscPrintf(wcomm,"\t[%d]%s aggregate processors noop: new_npe=%d, neq(loc)=%d\n",mype,__FUNCT__,new_npe,ncrs_eq); 332 } 333 PetscFunctionReturn(0); 334 } 335 336 if (pc_gamg->verbose) PetscPrintf(wcomm,"\t[%d]%s number of equations (loc) %d with simple aggregation\n",mype,__FUNCT__,ncrs_eq); 337 targetPE = mype/rfactor; 338 ierr = ISCreateStride( wcomm, ncrs_eq, targetPE, 0, &is_eq_newproc ); CHKERRQ(ierr); 339 340 if ( stokes ) { 341 ierr = ISCreateStride( wcomm, ncrs_prim*cr_bs, targetPE, 0, &is_eq_newproc_prim ); CHKERRQ(ierr); 342 } 343 } /* end simple 'is_eq_newproc' */ 344 345 /* 346 Create an index set from the is_eq_newproc index set to indicate the mapping TO 347 */ 348 ierr = ISPartitioningToNumbering( is_eq_newproc, &is_eq_num ); CHKERRQ(ierr); 349 if ( stokes ) { 350 ierr = ISPartitioningToNumbering( is_eq_newproc_prim, &is_eq_num_prim ); CHKERRQ(ierr); 351 } 352 else is_eq_num_prim = is_eq_num; 353 /* 354 Determine how many equations/vertices are assigned to each processor 355 */ 356 ierr = ISPartitioningCount( is_eq_newproc, npe, counts ); CHKERRQ(ierr); 357 ncrs_eq_new = counts[mype]; 358 ierr = ISDestroy( &is_eq_newproc ); CHKERRQ(ierr); 359 if ( stokes ) { 360 ierr = ISPartitioningCount( is_eq_newproc_prim, npe, counts ); CHKERRQ(ierr); 361 ierr = ISDestroy( &is_eq_newproc_prim ); CHKERRQ(ierr); 362 ncrs_prim_new = counts[mype]/cr_bs; /* nodes */ 363 } 364 else ncrs_prim_new = ncrs_eq_new/cr_bs; /* eqs */ 365 366 ierr = PetscFree( counts ); CHKERRQ(ierr); 367 #if defined PETSC_GAMG_USE_LOG 368 ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET12],0,0,0,0); CHKERRQ(ierr); 369 #endif 370 371 /* move data (for primal equations only) */ 372 /* Create a vector to contain the newly ordered element information */ 373 ierr = VecCreate( wcomm, &dest_crd ); 374 ierr = VecSetSizes( dest_crd, node_data_sz*ncrs_prim_new, PETSC_DECIDE ); CHKERRQ(ierr); 375 ierr = VecSetFromOptions( dest_crd ); CHKERRQ(ierr); /* this is needed! */ 376 /* 377 There are 'ndata_rows*ndata_cols' data items per node, (one can think of the vectors of having 378 a block size of ...). Note, ISs are expanded into equation space by 'cr_bs'. 
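     In the scatter source vector each node's values are stored contiguously
     (jx = ii*node_data_sz + kk*ndata_cols + jj), whereas pc_gamg->data keeps each data
     column contiguous across all local nodes (ix = ii*ndata_rows + kk + jj*stride0);
     the loops below simply permute between these two layouts.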
379 */ 380 ierr = PetscMalloc( (ncrs_prim*node_data_sz)*sizeof(PetscInt), &tidx ); CHKERRQ(ierr); 381 ierr = ISGetIndices( is_eq_num_prim, &idx ); CHKERRQ(ierr); 382 for (ii=0,jj=0; ii<ncrs_prim ; ii++) { 383 PetscInt id = idx[ii*cr_bs]/cr_bs; /* get node back */ 384 for ( kk=0; kk<node_data_sz ; kk++, jj++) tidx[jj] = id*node_data_sz + kk; 385 } 386 ierr = ISRestoreIndices( is_eq_num_prim, &idx ); CHKERRQ(ierr); 387 ierr = ISCreateGeneral( wcomm, node_data_sz*ncrs_prim, tidx, PETSC_COPY_VALUES, &isscat ); 388 CHKERRQ(ierr); 389 ierr = PetscFree( tidx ); CHKERRQ(ierr); 390 /* 391 Create a vector to contain the original vertex information for each element 392 */ 393 ierr = VecCreateSeq( PETSC_COMM_SELF, node_data_sz*ncrs_prim, &src_crd ); CHKERRQ(ierr); 394 for ( jj=0; jj<ndata_cols ; jj++ ) { 395 const PetscInt stride0=ncrs_prim*pc_gamg->data_cell_rows; 396 for ( ii=0 ; ii<ncrs_prim ; ii++) { 397 for ( kk=0; kk<ndata_rows ; kk++ ) { 398 PetscInt ix = ii*ndata_rows + kk + jj*stride0, jx = ii*node_data_sz + kk*ndata_cols + jj; 399 PetscScalar tt = (PetscScalar)pc_gamg->data[ix]; 400 ierr = VecSetValues( src_crd, 1, &jx, &tt, INSERT_VALUES ); CHKERRQ(ierr); 401 } 402 } 403 } 404 ierr = VecAssemblyBegin(src_crd); CHKERRQ(ierr); 405 ierr = VecAssemblyEnd(src_crd); CHKERRQ(ierr); 406 /* 407 Scatter the element vertex information (still in the original vertex ordering) 408 to the correct processor 409 */ 410 ierr = VecScatterCreate( src_crd, PETSC_NULL, dest_crd, isscat, &vecscat); 411 CHKERRQ(ierr); 412 ierr = ISDestroy( &isscat ); CHKERRQ(ierr); 413 ierr = VecScatterBegin(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 414 ierr = VecScatterEnd(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 415 ierr = VecScatterDestroy( &vecscat ); CHKERRQ(ierr); 416 ierr = VecDestroy( &src_crd ); CHKERRQ(ierr); 417 /* 418 Put the element vertex data into a new allocation of the gdata->ele 419 */ 420 ierr = PetscFree( pc_gamg->data ); CHKERRQ(ierr); 421 ierr = PetscMalloc( node_data_sz*ncrs_prim_new*sizeof(PetscReal), &pc_gamg->data ); CHKERRQ(ierr); 422 pc_gamg->data_sz = node_data_sz*ncrs_prim_new; 423 strideNew = ncrs_prim_new*ndata_rows; 424 ierr = VecGetArray( dest_crd, &array ); CHKERRQ(ierr); 425 for ( jj=0; jj<ndata_cols ; jj++ ) { 426 for ( ii=0 ; ii<ncrs_prim_new ; ii++) { 427 for ( kk=0; kk<ndata_rows ; kk++ ) { 428 PetscInt ix = ii*ndata_rows + kk + jj*strideNew, jx = ii*node_data_sz + kk*ndata_cols + jj; 429 pc_gamg->data[ix] = PetscRealPart(array[jx]); 430 } 431 } 432 } 433 ierr = VecRestoreArray( dest_crd, &array ); CHKERRQ(ierr); 434 ierr = VecDestroy( &dest_crd ); CHKERRQ(ierr); 435 436 /* move A and P (columns) with new layout */ 437 #if defined PETSC_GAMG_USE_LOG 438 ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET13],0,0,0,0);CHKERRQ(ierr); 439 #endif 440 441 /* 442 Invert for MatGetSubMatrix 443 */ 444 ierr = ISInvertPermutation( is_eq_num, ncrs_eq_new, &new_eq_indices ); CHKERRQ(ierr); 445 ierr = ISSort( new_eq_indices ); CHKERRQ(ierr); /* is this needed? 
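                                                       presumably yes: MatGetSubMatrix
                                                       below generally expects sorted
                                                       index sets in parallel, and the
                                                       same IS is reused for the
                                                       prolongator columns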
*/ 446 ierr = ISSetBlockSize( new_eq_indices, cr_bs ); CHKERRQ(ierr); 447 if (is_eq_num != is_eq_num_prim) { 448 ierr = ISDestroy( &is_eq_num_prim ); CHKERRQ(ierr); /* could be same as 'is_eq_num' */ 449 } 450 ierr = ISDestroy( &is_eq_num ); CHKERRQ(ierr); 451 #if defined PETSC_GAMG_USE_LOG 452 ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET13],0,0,0,0);CHKERRQ(ierr); 453 ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET14],0,0,0,0);CHKERRQ(ierr); 454 #endif 455 /* 'a_Amat_crs' output */ 456 { 457 Mat mat; 458 ierr = MatGetSubMatrix( Cmat, new_eq_indices, new_eq_indices, MAT_INITIAL_MATRIX, &mat ); 459 CHKERRQ(ierr); 460 *a_Amat_crs = mat; 461 462 if (!PETSC_TRUE){ 463 PetscInt cbs, rbs; 464 ierr = MatGetBlockSizes( Cmat, &rbs, &cbs ); CHKERRQ(ierr); 465 PetscPrintf(MPI_COMM_SELF,"[%d]%s Old Mat rbs=%d cbs=%d\n",mype,__FUNCT__,rbs,cbs); 466 ierr = MatGetBlockSizes( mat, &rbs, &cbs ); CHKERRQ(ierr); 467 PetscPrintf(MPI_COMM_SELF,"[%d]%s New Mat rbs=%d cbs=%d cr_bs=%d\n",mype,__FUNCT__,rbs,cbs,cr_bs); 468 } 469 } 470 ierr = MatDestroy( &Cmat ); CHKERRQ(ierr); 471 472 #if defined PETSC_GAMG_USE_LOG 473 ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET14],0,0,0,0);CHKERRQ(ierr); 474 #endif 475 /* prolongator */ 476 { 477 IS findices; 478 PetscInt Istart,Iend; 479 Mat Pnew; 480 ierr = MatGetOwnershipRange( Pold, &Istart, &Iend ); CHKERRQ(ierr); 481 #if defined PETSC_GAMG_USE_LOG 482 ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET15],0,0,0,0);CHKERRQ(ierr); 483 #endif 484 ierr = ISCreateStride(wcomm,Iend-Istart,Istart,1,&findices); CHKERRQ(ierr); 485 ierr = ISSetBlockSize(findices,f_bs); CHKERRQ(ierr); 486 ierr = MatGetSubMatrix( Pold, findices, new_eq_indices, MAT_INITIAL_MATRIX, &Pnew ); 487 CHKERRQ(ierr); 488 ierr = ISDestroy( &findices ); CHKERRQ(ierr); 489 490 if (!PETSC_TRUE){ 491 PetscInt cbs, rbs; 492 ierr = MatGetBlockSizes( Pold, &rbs, &cbs ); CHKERRQ(ierr); 493 PetscPrintf(MPI_COMM_SELF,"[%d]%s Pold rbs=%d cbs=%d\n",mype,__FUNCT__,rbs,cbs); 494 ierr = MatGetBlockSizes( Pnew, &rbs, &cbs ); CHKERRQ(ierr); 495 PetscPrintf(MPI_COMM_SELF,"[%d]%s Pnew rbs=%d cbs=%d\n",mype,__FUNCT__,rbs,cbs); 496 } 497 #if defined PETSC_GAMG_USE_LOG 498 ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET15],0,0,0,0);CHKERRQ(ierr); 499 #endif 500 ierr = MatDestroy( a_P_inout ); CHKERRQ(ierr); 501 502 /* output - repartitioned */ 503 *a_P_inout = Pnew; 504 } 505 ierr = ISDestroy( &new_eq_indices ); CHKERRQ(ierr); 506 507 *a_nactive_proc = new_npe; /* output */ 508 } 509 510 /* outout matrix data */ 511 if ( !PETSC_TRUE ) { 512 PetscViewer viewer; char fname[32]; static int llev=0; Cmat = *a_Amat_crs; 513 if (llev==0) { 514 sprintf(fname,"Cmat_%d.m",llev++); 515 PetscViewerASCIIOpen(wcomm,fname,&viewer); 516 ierr = PetscViewerSetFormat( viewer, PETSC_VIEWER_ASCII_MATLAB); CHKERRQ(ierr); 517 ierr = MatView(Amat_fine, viewer ); CHKERRQ(ierr); 518 ierr = PetscViewerDestroy( &viewer ); 519 } 520 sprintf(fname,"Cmat_%d.m",llev++); 521 PetscViewerASCIIOpen(wcomm,fname,&viewer); 522 ierr = PetscViewerSetFormat( viewer, PETSC_VIEWER_ASCII_MATLAB); CHKERRQ(ierr); 523 ierr = MatView(Cmat, viewer ); CHKERRQ(ierr); 524 ierr = PetscViewerDestroy( &viewer ); 525 } 526 527 PetscFunctionReturn(0); 528 } 529 530 /* -------------------------------------------------------------------------- */ 531 /* 532 PCSetUp_GAMG - Prepares for the use of the GAMG preconditioner 533 by setting data structures and options. 534 535 Input Parameter: 536 . 
pc - the preconditioner context 537 538 Application Interface Routine: PCSetUp() 539 540 Notes: 541 The interface routine PCSetUp() is not usually called directly by 542 the user, but instead is called by PCApply() if necessary. 543 */ 544 #undef __FUNCT__ 545 #define __FUNCT__ "PCSetUp_GAMG" 546 PetscErrorCode PCSetUp_GAMG( PC pc ) 547 { 548 PetscErrorCode ierr; 549 PC_MG *mg = (PC_MG*)pc->data; 550 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 551 Mat Pmat = pc->pmat; 552 PetscInt fine_level,level,level1,bs,M,qq,lidx,nASMBlocksArr[GAMG_MAXLEVELS]; 553 MPI_Comm wcomm = ((PetscObject)pc)->comm; 554 PetscMPIInt mype,npe,nactivepe; 555 Mat Aarr[GAMG_MAXLEVELS],Parr[GAMG_MAXLEVELS]; 556 PetscReal emaxs[GAMG_MAXLEVELS]; 557 IS *ASMLocalIDsArr[GAMG_MAXLEVELS]; 558 GAMGKKTMat kktMatsArr[GAMG_MAXLEVELS]; 559 PetscLogDouble nnz0=0.,nnztot=0.; 560 MatInfo info; 561 PetscBool stokes = PETSC_FALSE, redo_mesh_setup = PETSC_FALSE; 562 563 PetscFunctionBegin; 564 ierr = MPI_Comm_rank(wcomm,&mype);CHKERRQ(ierr); 565 ierr = MPI_Comm_size(wcomm,&npe);CHKERRQ(ierr); 566 if (pc_gamg->verbose>2) PetscPrintf(wcomm,"[%d]%s pc_gamg->setup_count=%d pc->setupcalled=%d\n",mype,__FUNCT__,pc_gamg->setup_count,pc->setupcalled); 567 if ( pc_gamg->setup_count++ > 0 ) { 568 if ( redo_mesh_setup ) { 569 /* reset everything */ 570 ierr = PCReset_MG( pc ); CHKERRQ(ierr); 571 pc->setupcalled = 0; 572 } 573 else { 574 PC_MG_Levels **mglevels = mg->levels; 575 /* just do Galerkin grids */ 576 Mat B,dA,dB; 577 assert(pc->setupcalled); 578 579 if ( pc_gamg->Nlevels > 1 ) { 580 /* currently only handle case where mat and pmat are the same on coarser levels */ 581 ierr = KSPGetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,&dA,&dB,PETSC_NULL);CHKERRQ(ierr); 582 /* (re)set to get dirty flag */ 583 ierr = KSPSetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,dA,dB,SAME_NONZERO_PATTERN);CHKERRQ(ierr); 584 585 for (level=pc_gamg->Nlevels-2; level>-1; level--) { 586 /* the first time through the matrix structure has changed from repartitioning */ 587 if ( pc_gamg->setup_count==2 /*&& (pc_gamg->repart || level==0)*/) { 588 ierr = MatPtAP(dB,mglevels[level+1]->interpolate,MAT_INITIAL_MATRIX,1.0,&B);CHKERRQ(ierr); 589 ierr = MatDestroy(&mglevels[level]->A);CHKERRQ(ierr); 590 mglevels[level]->A = B; 591 } 592 else { 593 ierr = KSPGetOperators(mglevels[level]->smoothd,PETSC_NULL,&B,PETSC_NULL);CHKERRQ(ierr); 594 ierr = MatPtAP(dB,mglevels[level+1]->interpolate,MAT_REUSE_MATRIX,1.0,&B);CHKERRQ(ierr); 595 } 596 ierr = KSPSetOperators(mglevels[level]->smoothd,B,B,SAME_NONZERO_PATTERN); CHKERRQ(ierr); 597 dB = B; 598 } 599 } 600 601 ierr = PCSetUp_MG( pc );CHKERRQ( ierr ); 602 603 /* PCSetUp_MG seems to insists on setting this to GMRES */ 604 ierr = KSPSetType( mglevels[0]->smoothd, KSPPREONLY ); CHKERRQ(ierr); 605 606 PetscFunctionReturn(0); 607 } 608 } 609 610 ierr = PetscOptionsGetBool(((PetscObject)pc)->prefix,"-pc_fieldsplit_detect_saddle_point",&stokes,PETSC_NULL);CHKERRQ(ierr); 611 612 ierr = GAMGKKTMatCreate( Pmat, stokes, &kktMatsArr[0] ); CHKERRQ(ierr); 613 614 if ( !pc_gamg->data ) { 615 if ( pc_gamg->orig_data ) { 616 ierr = MatGetBlockSize( Pmat, &bs ); CHKERRQ(ierr); 617 ierr = MatGetLocalSize( Pmat, &qq, PETSC_NULL ); CHKERRQ(ierr); 618 pc_gamg->data_sz = (qq/bs)*pc_gamg->orig_data_cell_rows*pc_gamg->orig_data_cell_cols; 619 pc_gamg->data_cell_rows = pc_gamg->orig_data_cell_rows; 620 pc_gamg->data_cell_cols = pc_gamg->orig_data_cell_cols; 621 ierr = PetscMalloc( pc_gamg->data_sz*sizeof(PetscReal), &pc_gamg->data ); CHKERRQ(ierr); 
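      /* restore the cached fine-grid data (e.g. coordinates); pc_gamg->data is
         consumed/overwritten level by level during setup and freed at the end */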
622 for (qq=0;qq<pc_gamg->data_sz;qq++) pc_gamg->data[qq] = pc_gamg->orig_data[qq]; 623 } 624 else { 625 if ( !pc_gamg->createdefaultdata ){ 626 SETERRQ(wcomm,PETSC_ERR_PLIB,"'createdefaultdata' not set(?) need to support NULL data"); 627 } 628 if ( stokes ) { 629 SETERRQ(wcomm,PETSC_ERR_PLIB,"Need data (eg, PCSetCoordinates) for Stokes problems"); 630 } 631 ierr = pc_gamg->createdefaultdata( pc, kktMatsArr[0].A11 ); CHKERRQ(ierr); 632 } 633 } 634 635 /* cache original data for reuse */ 636 if ( !pc_gamg->orig_data && redo_mesh_setup ) { 637 ierr = PetscMalloc( pc_gamg->data_sz*sizeof(PetscReal), &pc_gamg->orig_data ); CHKERRQ(ierr); 638 for (qq=0;qq<pc_gamg->data_sz;qq++) pc_gamg->orig_data[qq] = pc_gamg->data[qq]; 639 pc_gamg->orig_data_cell_rows = pc_gamg->data_cell_rows; 640 pc_gamg->orig_data_cell_cols = pc_gamg->data_cell_cols; 641 } 642 643 /* get basic dims */ 644 if ( stokes ) { 645 bs = pc_gamg->data_cell_rows; /* this is agg-mg specific */ 646 } 647 else { 648 ierr = MatGetBlockSize( Pmat, &bs ); CHKERRQ(ierr); 649 } 650 651 ierr = MatGetSize( Pmat, &M, &qq );CHKERRQ(ierr); 652 if (pc_gamg->verbose) { 653 if (pc_gamg->verbose==1) ierr = MatGetInfo(Pmat,MAT_LOCAL,&info); 654 else ierr = MatGetInfo(Pmat,MAT_GLOBAL_SUM,&info); 655 CHKERRQ(ierr); 656 nnz0 = info.nz_used; 657 nnztot = info.nz_used; 658 PetscPrintf(wcomm,"\t[%d]%s level %d N=%d, n data rows=%d, n data cols=%d, nnz/row (ave)=%d, np=%d\n", 659 mype,__FUNCT__,0,M,pc_gamg->data_cell_rows,pc_gamg->data_cell_cols, 660 (int)(nnz0/(PetscReal)M),npe); 661 } 662 663 /* Get A_i and R_i */ 664 for ( level=0, Aarr[0]=Pmat, nactivepe = npe; /* hard wired stopping logic */ 665 level < (pc_gamg->Nlevels-1) && (level==0 || M>pc_gamg->coarse_eq_limit); /* && (npe==1 || nactivepe>1); */ 666 level++ ){ 667 level1 = level + 1; 668 #if defined PETSC_GAMG_USE_LOG 669 ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET1],0,0,0,0); CHKERRQ(ierr); 670 #if (defined GAMG_STAGES) 671 ierr = PetscLogStagePush(gamg_stages[level]); CHKERRQ( ierr ); 672 #endif 673 #endif 674 /* deal with Stokes, get sub matrices */ 675 if ( level > 0 ) { 676 ierr = GAMGKKTMatCreate( Aarr[level], stokes, &kktMatsArr[level] ); CHKERRQ(ierr); 677 } 678 { /* construct prolongator */ 679 Mat Gmat; 680 PetscCoarsenData *agg_lists; 681 Mat Prol11,Prol22; 682 683 ierr = pc_gamg->graph( pc,kktMatsArr[level].A11, &Gmat ); CHKERRQ(ierr); 684 ierr = pc_gamg->coarsen( pc, &Gmat, &agg_lists ); CHKERRQ(ierr); 685 ierr = pc_gamg->prolongator( pc, kktMatsArr[level].A11, Gmat, agg_lists, &Prol11 ); CHKERRQ(ierr); 686 687 /* could have failed to create new level */ 688 if ( Prol11 ){ 689 /* get new block size of coarse matrices */ 690 ierr = MatGetBlockSizes( Prol11, PETSC_NULL, &bs ); CHKERRQ(ierr); 691 692 if ( stokes ) { 693 if (!pc_gamg->formkktprol) SETERRQ(wcomm,PETSC_ERR_USER,"Stokes not supportd by AMG method."); 694 /* R A12 == (T = A21 P)'; G = T' T; coarsen G; form plain agg with G */ 695 ierr = pc_gamg->formkktprol( pc, Prol11, kktMatsArr[level].A21, &Prol22 ); CHKERRQ(ierr); 696 } 697 698 if ( pc_gamg->optprol ){ 699 /* smooth */ 700 ierr = pc_gamg->optprol( pc, kktMatsArr[level].A11, &Prol11 ); CHKERRQ(ierr); 701 } 702 703 if ( stokes ) { 704 IS is_row[2] = {kktMatsArr[level].prim_is,kktMatsArr[level].constr_is}; 705 Mat a[4] = {Prol11, PETSC_NULL, PETSC_NULL, Prol22 }; 706 ierr = MatCreateNest(wcomm,2,is_row, 2, is_row, a, &Parr[level1] ); CHKERRQ(ierr); 707 } 708 else { 709 Parr[level1] = Prol11; 710 } 711 } 712 else Parr[level1] = PETSC_NULL; 713 714 if ( 
pc_gamg->use_aggs_in_gasm ) { 715 ierr = PetscCDGetASMBlocks(agg_lists, bs, &nASMBlocksArr[level], &ASMLocalIDsArr[level] ); 716 CHKERRQ(ierr); 717 } 718 719 720 721 ierr = MatDestroy( &Gmat ); CHKERRQ(ierr); 722 ierr = PetscCDDestroy( agg_lists ); CHKERRQ(ierr); 723 } /* construct prolongator scope */ 724 #if defined PETSC_GAMG_USE_LOG 725 ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET1],0,0,0,0);CHKERRQ(ierr); 726 #endif 727 /* cache eigen estimate */ 728 if ( pc_gamg->emax_id != -1 ){ 729 PetscBool flag; 730 ierr = PetscObjectComposedDataGetReal( (PetscObject)kktMatsArr[level].A11, pc_gamg->emax_id, emaxs[level], flag ); 731 CHKERRQ( ierr ); 732 if ( !flag ) emaxs[level] = -1.; 733 } 734 else emaxs[level] = -1.; 735 if (level==0) Aarr[0] = Pmat; /* use Pmat for finest level setup */ 736 if ( !Parr[level1] ) { 737 if (pc_gamg->verbose) PetscPrintf(wcomm,"\t[%d]%s stop gridding, level %d\n",mype,__FUNCT__,level); 738 break; 739 } 740 #if defined PETSC_GAMG_USE_LOG 741 ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET2],0,0,0,0);CHKERRQ(ierr); 742 #endif 743 744 ierr = createLevel( pc, Aarr[level], bs, (PetscBool)(level==pc_gamg->Nlevels-2), 745 stokes, &Parr[level1], &Aarr[level1], &nactivepe ); 746 CHKERRQ(ierr); 747 748 #if defined PETSC_GAMG_USE_LOG 749 ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET2],0,0,0,0);CHKERRQ(ierr); 750 #endif 751 ierr = MatGetSize( Aarr[level1], &M, &qq );CHKERRQ(ierr); 752 753 if (pc_gamg->verbose > 0){ 754 PetscInt NN = M; 755 if (pc_gamg->verbose==1) { 756 ierr = MatGetInfo(Aarr[level1],MAT_LOCAL,&info); CHKERRQ(ierr); 757 ierr = MatGetLocalSize( Aarr[level1], &NN, &qq ); 758 } 759 else ierr = MatGetInfo( Aarr[level1], MAT_GLOBAL_SUM, &info ); 760 761 CHKERRQ(ierr); 762 nnztot += info.nz_used; 763 PetscPrintf(wcomm,"\t\t[%d]%s %d) N=%d, n data cols=%d, nnz/row (ave)=%d, %d active pes\n", 764 mype,__FUNCT__,(int)level1,M,pc_gamg->data_cell_cols, 765 (int)(info.nz_used/(PetscReal)NN), nactivepe ); 766 CHKERRQ(ierr); 767 } 768 769 /* stop if one node -- could pull back for singular problems */ 770 if ( M/pc_gamg->data_cell_cols < 2 ) { 771 level++; 772 break; 773 } 774 #if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES) 775 ierr = PetscLogStagePop(); CHKERRQ( ierr ); 776 #endif 777 } /* levels */ 778 779 if ( pc_gamg->data ) { 780 ierr = PetscFree( pc_gamg->data ); CHKERRQ( ierr ); 781 pc_gamg->data = PETSC_NULL; 782 } 783 784 if (pc_gamg->verbose) PetscPrintf(wcomm,"\t[%d]%s %d levels, grid complexity = %g\n",0,__FUNCT__,level+1,nnztot/nnz0); 785 pc_gamg->Nlevels = level + 1; 786 fine_level = level; 787 ierr = PCMGSetLevels(pc,pc_gamg->Nlevels,PETSC_NULL);CHKERRQ(ierr); 788 789 /* simple setup */ 790 if ( !PETSC_TRUE ){ 791 PC_MG_Levels **mglevels = mg->levels; 792 for (lidx=0,level=pc_gamg->Nlevels-1; 793 lidx<fine_level; 794 lidx++, level--){ 795 ierr = PCMGSetInterpolation( pc, lidx+1, Parr[level] );CHKERRQ(ierr); 796 ierr = KSPSetOperators( mglevels[lidx]->smoothd, Aarr[level], Aarr[level], SAME_NONZERO_PATTERN );CHKERRQ(ierr); 797 ierr = MatDestroy( &Parr[level] ); CHKERRQ(ierr); 798 ierr = MatDestroy( &Aarr[level] ); CHKERRQ(ierr); 799 } 800 ierr = KSPSetOperators( mglevels[fine_level]->smoothd, Aarr[0], Aarr[0], SAME_NONZERO_PATTERN ); CHKERRQ(ierr); 801 802 ierr = PCSetUp_MG( pc ); CHKERRQ( ierr ); 803 } 804 else if ( pc_gamg->Nlevels > 1 ) { /* don't setup MG if one level */ 805 /* set default smoothers & set operators */ 806 for ( lidx = 1, level = pc_gamg->Nlevels-2; 807 lidx <= fine_level; 808 lidx++, level--) { 809 KSP 
smoother; 810 PC subpc; 811 812 ierr = PCMGGetSmoother( pc, lidx, &smoother ); CHKERRQ(ierr); 813 ierr = KSPGetPC( smoother, &subpc ); CHKERRQ(ierr); 814 815 ierr = KSPSetNormType( smoother, KSP_NORM_NONE ); CHKERRQ(ierr); 816 /* set ops */ 817 ierr = KSPSetOperators( smoother, Aarr[level], Aarr[level], SAME_NONZERO_PATTERN ); CHKERRQ(ierr); 818 ierr = PCMGSetInterpolation( pc, lidx, Parr[level+1] );CHKERRQ(ierr); 819 820 /* create field split PC, get subsmoother */ 821 if ( stokes ) { 822 KSP *ksps; 823 PetscInt nn; 824 ierr = PCFieldSplitSetIS(subpc,"0",kktMatsArr[level].prim_is); CHKERRQ(ierr); 825 ierr = PCFieldSplitSetIS(subpc,"1",kktMatsArr[level].constr_is); CHKERRQ(ierr); 826 ierr = PCFieldSplitGetSubKSP(subpc,&nn,&ksps); CHKERRQ(ierr); 827 smoother = ksps[0]; 828 ierr = KSPGetPC( smoother, &subpc ); CHKERRQ(ierr); 829 ierr = PetscFree( ksps ); CHKERRQ(ierr); 830 } 831 ierr = GAMGKKTMatDestroy( &kktMatsArr[level] ); CHKERRQ(ierr); 832 833 /* set defaults */ 834 ierr = KSPSetType( smoother, KSPCHEBYSHEV );CHKERRQ(ierr); 835 836 /* override defaults and command line args (!) */ 837 if ( pc_gamg->use_aggs_in_gasm ) { 838 PetscInt sz; 839 IS *is; 840 841 sz = nASMBlocksArr[level]; 842 is = ASMLocalIDsArr[level]; 843 ierr = PCSetType( subpc, PCGASM ); CHKERRQ(ierr); 844 if (sz==0){ 845 IS is; 846 PetscInt my0,kk; 847 ierr = MatGetOwnershipRange( Aarr[level], &my0, &kk ); CHKERRQ(ierr); 848 ierr = ISCreateGeneral(PETSC_COMM_SELF, 1, &my0, PETSC_COPY_VALUES, &is ); CHKERRQ(ierr); 849 ierr = PCGASMSetSubdomains( subpc, 1, &is, PETSC_NULL ); CHKERRQ(ierr); 850 ierr = ISDestroy( &is ); CHKERRQ(ierr); 851 } 852 else { 853 PetscInt kk; 854 ierr = PCGASMSetSubdomains( subpc, sz, is, PETSC_NULL ); CHKERRQ(ierr); 855 for (kk=0;kk<sz;kk++){ 856 ierr = ISDestroy( &is[kk] ); CHKERRQ(ierr); 857 } 858 ierr = PetscFree( is ); CHKERRQ(ierr); 859 } 860 ierr = PCGASMSetOverlap( subpc, 0 ); CHKERRQ(ierr); 861 862 ASMLocalIDsArr[level] = PETSC_NULL; 863 nASMBlocksArr[level] = 0; 864 ierr = PCGASMSetType( subpc, PC_GASM_BASIC ); CHKERRQ(ierr); 865 } 866 else { 867 ierr = PCSetType( subpc, PCJACOBI ); CHKERRQ(ierr); 868 } 869 } 870 { 871 /* coarse grid */ 872 KSP smoother,*k2; PC subpc,pc2; PetscInt ii,first; 873 Mat Lmat = Aarr[(level=pc_gamg->Nlevels-1)]; lidx = 0; 874 ierr = PCMGGetSmoother( pc, lidx, &smoother ); CHKERRQ(ierr); 875 ierr = KSPSetOperators( smoother, Lmat, Lmat, SAME_NONZERO_PATTERN ); CHKERRQ(ierr); 876 ierr = KSPSetNormType( smoother, KSP_NORM_NONE ); CHKERRQ(ierr); 877 ierr = KSPGetPC( smoother, &subpc ); CHKERRQ(ierr); 878 ierr = PCSetType( subpc, PCBJACOBI ); CHKERRQ(ierr); 879 ierr = PCSetUp( subpc ); CHKERRQ(ierr); 880 ierr = PCBJacobiGetSubKSP(subpc,&ii,&first,&k2);CHKERRQ(ierr); assert(ii==1); 881 ierr = KSPGetPC(k2[0],&pc2);CHKERRQ(ierr); 882 ierr = PCSetType( pc2, PCLU ); CHKERRQ(ierr); 883 } 884 885 /* should be called in PCSetFromOptions_GAMG(), but cannot be called prior to PCMGSetLevels() */ 886 ierr = PetscObjectOptionsBegin( (PetscObject)pc );CHKERRQ(ierr); 887 ierr = PCSetFromOptions_MG( pc ); CHKERRQ(ierr); 888 ierr = PetscOptionsEnd(); CHKERRQ(ierr); 889 if (mg->galerkin != 2) SETERRQ(wcomm,PETSC_ERR_USER,"GAMG does Galerkin manually so the -pc_mg_galerkin option must not be used."); 890 891 /* create cheby smoothers */ 892 for ( lidx = 1, level = pc_gamg->Nlevels-2; 893 lidx <= fine_level; 894 lidx++, level--) { 895 KSP smoother; 896 PetscBool flag; 897 PC subpc; 898 899 ierr = PCMGGetSmoother( pc, lidx, &smoother ); CHKERRQ(ierr); 900 ierr = KSPGetPC( smoother, &subpc 
); CHKERRQ(ierr); 901 902 /* create field split PC, get subsmoother */ 903 if ( stokes ) { 904 KSP *ksps; 905 PetscInt nn; 906 ierr = PCFieldSplitGetSubKSP(subpc,&nn,&ksps); CHKERRQ(ierr); 907 smoother = ksps[0]; 908 ierr = KSPGetPC( smoother, &subpc ); CHKERRQ(ierr); 909 ierr = PetscFree( ksps ); CHKERRQ(ierr); 910 } 911 912 /* do my own cheby */ 913 ierr = PetscObjectTypeCompare( (PetscObject)smoother, KSPCHEBYSHEV, &flag ); CHKERRQ(ierr); 914 if ( flag ) { 915 PetscReal emax, emin; 916 ierr = PetscObjectTypeCompare( (PetscObject)subpc, PCJACOBI, &flag ); CHKERRQ(ierr); 917 if ( flag && emaxs[level] > 0.0 ) emax=emaxs[level]; /* eigen estimate only for diagnal PC */ 918 else{ /* eigen estimate 'emax' */ 919 KSP eksp; 920 Mat Lmat = Aarr[level]; 921 Vec bb, xx; 922 923 ierr = MatGetVecs( Lmat, &bb, 0 ); CHKERRQ(ierr); 924 ierr = MatGetVecs( Lmat, &xx, 0 ); CHKERRQ(ierr); 925 { 926 PetscRandom rctx; 927 ierr = PetscRandomCreate(wcomm,&rctx);CHKERRQ(ierr); 928 ierr = PetscRandomSetFromOptions(rctx);CHKERRQ(ierr); 929 ierr = VecSetRandom(bb,rctx);CHKERRQ(ierr); 930 ierr = PetscRandomDestroy( &rctx ); CHKERRQ(ierr); 931 } 932 933 /* zeroing out BC rows -- needed for crazy matrices */ 934 { 935 PetscInt Istart,Iend,ncols,jj,Ii; 936 PetscScalar zero = 0.0; 937 ierr = MatGetOwnershipRange( Lmat, &Istart, &Iend ); CHKERRQ(ierr); 938 for ( Ii = Istart, jj = 0 ; Ii < Iend ; Ii++, jj++ ) { 939 ierr = MatGetRow(Lmat,Ii,&ncols,0,0); CHKERRQ(ierr); 940 if( ncols <= 1 ) { 941 ierr = VecSetValues( bb, 1, &Ii, &zero, INSERT_VALUES ); CHKERRQ(ierr); 942 } 943 ierr = MatRestoreRow(Lmat,Ii,&ncols,0,0); CHKERRQ(ierr); 944 } 945 ierr = VecAssemblyBegin(bb); CHKERRQ(ierr); 946 ierr = VecAssemblyEnd(bb); CHKERRQ(ierr); 947 } 948 949 ierr = KSPCreate( wcomm, &eksp );CHKERRQ(ierr); 950 ierr = KSPSetTolerances( eksp, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT, 10 ); 951 CHKERRQ(ierr); 952 ierr = KSPSetNormType( eksp, KSP_NORM_NONE ); CHKERRQ(ierr); 953 ierr = KSPSetOptionsPrefix(eksp,((PetscObject)pc)->prefix);CHKERRQ(ierr); 954 ierr = KSPAppendOptionsPrefix( eksp, "gamg_est_"); CHKERRQ(ierr); 955 ierr = KSPSetFromOptions( eksp ); CHKERRQ(ierr); 956 957 ierr = KSPSetInitialGuessNonzero( eksp, PETSC_FALSE ); CHKERRQ(ierr); 958 ierr = KSPSetOperators( eksp, Lmat, Lmat, SAME_NONZERO_PATTERN ); CHKERRQ( ierr ); 959 ierr = KSPSetComputeSingularValues( eksp,PETSC_TRUE ); CHKERRQ(ierr); 960 961 /* set PC type to be same as smoother */ 962 ierr = KSPSetPC( eksp, subpc ); CHKERRQ( ierr ); 963 964 /* solve - keep stuff out of logging */ 965 ierr = PetscLogEventDeactivate(KSP_Solve);CHKERRQ(ierr); 966 ierr = PetscLogEventDeactivate(PC_Apply);CHKERRQ(ierr); 967 ierr = KSPSolve( eksp, bb, xx ); CHKERRQ(ierr); 968 ierr = PetscLogEventActivate(KSP_Solve);CHKERRQ(ierr); 969 ierr = PetscLogEventActivate(PC_Apply);CHKERRQ(ierr); 970 971 ierr = KSPComputeExtremeSingularValues( eksp, &emax, &emin ); CHKERRQ(ierr); 972 973 ierr = VecDestroy( &xx ); CHKERRQ(ierr); 974 ierr = VecDestroy( &bb ); CHKERRQ(ierr); 975 ierr = KSPDestroy( &eksp ); CHKERRQ(ierr); 976 977 if ( pc_gamg->verbose > 0 ) { 978 PetscInt N1, tt; 979 ierr = MatGetSize( Aarr[level], &N1, &tt ); CHKERRQ(ierr); 980 PetscPrintf(wcomm,"\t\t\t%s PC setup max eigen=%e min=%e on level %d (N=%d)\n",__FUNCT__,emax,emin,lidx,N1); 981 } 982 } 983 { 984 PetscInt N1, N0; 985 ierr = MatGetSize( Aarr[level], &N1, PETSC_NULL ); CHKERRQ(ierr); 986 ierr = MatGetSize( Aarr[level+1], &N0, PETSC_NULL ); CHKERRQ(ierr); 987 /* heuristic - is this crap? 
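           The commented-out rule below divided emax by the coarsening ratio N1/N0; the
           current default instead takes a fixed window of the estimated spectrum,
           [eigtarget[0]*emax, eigtarget[1]*emax] = [0.05*emax, 1.05*emax] by default,
           adjustable with -pc_gamg_eigtarget.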
*/
          /* emin = 1.*emax/((PetscReal)N1/(PetscReal)N0); */
          emin = emax * pc_gamg->eigtarget[0];
          emax *= pc_gamg->eigtarget[1];
        }
        ierr = KSPChebyshevSetEigenvalues( smoother, emax, emin );CHKERRQ(ierr);
      } /* setup cheby flag */
    } /* non-coarse levels */

    /* clean up */
    for (level=1;level<pc_gamg->Nlevels;level++){
      ierr = MatDestroy( &Parr[level] );  CHKERRQ(ierr);
      ierr = MatDestroy( &Aarr[level] );  CHKERRQ(ierr);
    }

    ierr = PCSetUp_MG( pc );CHKERRQ( ierr );

    if ( PETSC_FALSE ){
      KSP smoother;  /* PCSetUp_MG seems to insist on setting this to GMRES on coarse grid */
      ierr = PCMGGetSmoother( pc, 0, &smoother ); CHKERRQ(ierr);
      ierr = KSPSetType( smoother, KSPPREONLY ); CHKERRQ(ierr);
    }
  }
  else {
    KSP smoother;
    if (pc_gamg->verbose) PetscPrintf(wcomm,"\t[%d]%s one level solver used (system is seen as DD). Using default solver.\n",mype,__FUNCT__);
    ierr = PCMGGetSmoother( pc, 0, &smoother ); CHKERRQ(ierr);
    ierr = KSPSetOperators( smoother, Aarr[0], Aarr[0], SAME_NONZERO_PATTERN ); CHKERRQ(ierr);
    ierr = KSPSetType( smoother, KSPPREONLY ); CHKERRQ(ierr);
    ierr = PCSetUp_MG( pc );CHKERRQ( ierr );
  }

  PetscFunctionReturn(0);
}

/* ------------------------------------------------------------------------- */
/*
   PCDestroy_GAMG - Destroys the private context for the GAMG preconditioner
   that was created with PCCreate_GAMG().

   Input Parameter:
.  pc - the preconditioner context

   Application Interface Routine: PCDestroy()
*/
#undef __FUNCT__
#define __FUNCT__ "PCDestroy_GAMG"
PetscErrorCode PCDestroy_GAMG( PC pc )
{
  PetscErrorCode ierr;
  PC_MG          *mg = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg= (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  ierr = PCReset_GAMG( pc );CHKERRQ(ierr);
  ierr = PetscFree( pc_gamg );CHKERRQ(ierr);
  ierr = PCDestroy_MG( pc );CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetProcEqLim"
/*@
   PCGAMGSetProcEqLim - Set number of equations to aim for per process on the coarse
   grids via processor reduction.

   Not Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - the number of equations to aim for per process

   Options Database Key:
.  -pc_gamg_process_eq_limit

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetProcEqLim(PC pc, PetscInt n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetProcEqLim_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetProcEqLim_GAMG"
PetscErrorCode PCGAMGSetProcEqLim_GAMG(PC pc, PetscInt n)
{
  PC_MG   *mg = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  if (n>0) pc_gamg->min_eq_proc = n;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetCoarseEqLim"
/*@
   PCGAMGSetCoarseEqLim - Set maximum number of equations on the coarsest grid.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - the maximum number of equations for the coarsest grid

   Options Database Key:
.  -pc_gamg_coarse_eq_limit

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetCoarseEqLim(PC pc, PetscInt n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetCoarseEqLim_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetCoarseEqLim_GAMG"
PetscErrorCode PCGAMGSetCoarseEqLim_GAMG(PC pc, PetscInt n)
{
  PC_MG   *mg = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  if (n>0) pc_gamg->coarse_eq_limit = n;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetRepartitioning"
/*@
   PCGAMGSetRepartitioning - Repartition the coarse grids

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - PETSC_TRUE to repartition the coarse grids

   Options Database Key:
.  -pc_gamg_repartition

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetRepartitioning(PC pc, PetscBool n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetRepartitioning_C",(PC,PetscBool),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetRepartitioning_GAMG"
PetscErrorCode PCGAMGSetRepartitioning_GAMG(PC pc, PetscBool n)
{
  PC_MG   *mg = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->repart = n;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetUseASMAggs"
/*@
   PCGAMGSetUseASMAggs - Use the aggregates from the coarsening as the subdomains of
   the GASM smoother.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - PETSC_TRUE to use the aggregates as GASM subdomains

   Options Database Key:
.  -pc_gamg_use_agg_gasm

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetUseASMAggs(PC pc, PetscBool n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetUseASMAggs_C",(PC,PetscBool),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetUseASMAggs_GAMG"
PetscErrorCode PCGAMGSetUseASMAggs_GAMG(PC pc, PetscBool n)
{
  PC_MG   *mg = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->use_aggs_in_gasm = n;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetNlevels"
/*@
   PCGAMGSetNlevels - Set the maximum number of multigrid levels to create.

   Not collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - the maximum number of levels

   Options Database Key:
.  -pc_mg_levels

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetNlevels(PC pc, PetscInt n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetNlevels_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetNlevels_GAMG"
PetscErrorCode PCGAMGSetNlevels_GAMG(PC pc, PetscInt n)
{
  PC_MG   *mg = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->Nlevels = n;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetThreshold"
/*@
   PCGAMGSetThreshold - Relative threshold to use for dropping edges in the aggregation graph

   Not collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - the relative drop threshold

   Options Database Key:
.  -pc_gamg_threshold

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetThreshold(PC pc, PetscReal n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetThreshold_C",(PC,PetscReal),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetThreshold_GAMG"
PetscErrorCode PCGAMGSetThreshold_GAMG(PC pc, PetscReal n)
{
  PC_MG   *mg = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->threshold = n;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetType"
/*@
   PCGAMGSetType - Set the GAMG method type - calls the type's create method

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  type - the GAMG type (e.g. GAMGAGG or GAMGGEO)

   Options Database Key:
.  -pc_gamg_type

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetType( PC pc, PCGAMGType type )
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetType_C",(PC,PCGAMGType),(pc,type));
  CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetType_GAMG"
PetscErrorCode PCGAMGSetType_GAMG( PC pc, PCGAMGType type )
{
  PetscErrorCode ierr,(*r)(PC);

  PetscFunctionBegin;
  ierr = PetscFListFind(GAMGList,((PetscObject)pc)->comm,type,PETSC_FALSE,(PetscVoidStarFunction)&r);
  CHKERRQ(ierr);

  if (!r) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_UNKNOWN_TYPE,"Unknown GAMG type %s given",type);

  /* call sub create method */
  ierr = (*r)(pc); CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCSetFromOptions_GAMG"
PetscErrorCode PCSetFromOptions_GAMG( PC pc )
{
  PetscErrorCode ierr;
  PC_MG          *mg = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
  PetscBool      flag;
  PetscInt       two = 2;
  MPI_Comm       wcomm = ((PetscObject)pc)->comm;

  PetscFunctionBegin;
  ierr = PetscOptionsHead("GAMG options"); CHKERRQ(ierr);
  {
    /* -pc_gamg_verbose */
    ierr = PetscOptionsInt("-pc_gamg_verbose","Verbose (debugging) output for PCGAMG",
                           "none", pc_gamg->verbose,
                           &pc_gamg->verbose, PETSC_NULL );
    CHKERRQ(ierr);

    /* -pc_gamg_repartition */
    ierr = PetscOptionsBool("-pc_gamg_repartition",
                            "Repartition coarse grids (false)",
                            "PCGAMGRepartitioning",
                            pc_gamg->repart,
                            &pc_gamg->repart,
                            &flag);
    CHKERRQ(ierr);

    /* -pc_gamg_use_agg_gasm */
    ierr = PetscOptionsBool("-pc_gamg_use_agg_gasm",
                            "Use aggregation aggregates for GASM smoother (false)",
                            "PCGAMGUseASMAggs",
                            pc_gamg->use_aggs_in_gasm,
                            &pc_gamg->use_aggs_in_gasm,
                            &flag);
    CHKERRQ(ierr);

    /* -pc_gamg_process_eq_limit */
    ierr = PetscOptionsInt("-pc_gamg_process_eq_limit",
                           "Limit (goal) on number of equations per process on coarse grids",
                           "PCGAMGSetProcEqLim",
                           pc_gamg->min_eq_proc,
                           &pc_gamg->min_eq_proc,
                           &flag );
    CHKERRQ(ierr);

    /* -pc_gamg_coarse_eq_limit */
    ierr = PetscOptionsInt("-pc_gamg_coarse_eq_limit",
                           "Limit on number of equations for the coarse grid",
                           "PCGAMGSetCoarseEqLim",
                           pc_gamg->coarse_eq_limit,
                           &pc_gamg->coarse_eq_limit,
                           &flag );
    CHKERRQ(ierr);

    /* -pc_gamg_threshold */
    ierr = PetscOptionsReal("-pc_gamg_threshold",
                            "Relative threshold to use for dropping edges in aggregation graph",
                            "PCGAMGSetThreshold",
                            pc_gamg->threshold,
                            &pc_gamg->threshold,
                            &flag );
    CHKERRQ(ierr);
    if (flag && pc_gamg->verbose) PetscPrintf(wcomm,"\t[%d]%s threshold set %e\n",0,__FUNCT__,pc_gamg->threshold);

    ierr = PetscOptionsRealArray("-pc_gamg_eigtarget","Target eigenvalue range as fraction of estimated maximum eigenvalue","PCGAMGSetEigTarget",pc_gamg->eigtarget,&two,PETSC_NULL);CHKERRQ(ierr);

    ierr = PetscOptionsInt("-pc_mg_levels",
                           "Set number of MG levels",
                           "PCGAMGSetNlevels",
                           pc_gamg->Nlevels,
                           &pc_gamg->Nlevels,
                           &flag );
  }
  ierr =
PetscOptionsTail();CHKERRQ(ierr); 1440 1441 PetscFunctionReturn(0); 1442 } 1443 1444 /* -------------------------------------------------------------------------- */ 1445 /*MC 1446 PCGAMG - Geometric algebraic multigrid (AMG) preconditioning framework. 1447 - This is the entry point to GAMG, registered in pcregis.c 1448 1449 Options Database Keys: 1450 Multigrid options(inherited) 1451 + -pc_mg_cycles <1>: 1 for V cycle, 2 for W-cycle (PCMGSetCycleType) 1452 . -pc_mg_smoothup <1>: Number of post-smoothing steps (PCMGSetNumberSmoothUp) 1453 . -pc_mg_smoothdown <1>: Number of pre-smoothing steps (PCMGSetNumberSmoothDown) 1454 - -pc_mg_type <multiplicative>: (one of) additive multiplicative full kascade 1455 1456 Level: intermediate 1457 1458 Concepts: multigrid 1459 1460 .seealso: PCCreate(), PCSetType(), PCType (for list of available types), PC, PCMGType, 1461 PCMGSetLevels(), PCMGGetLevels(), PCMGSetType(), PCMGSetCycleType(), PCMGSetNumberSmoothDown(), 1462 PCMGSetNumberSmoothUp(), PCMGGetCoarseSolve(), PCMGSetResidual(), PCMGSetInterpolation(), 1463 PCMGSetRestriction(), PCMGGetSmoother(), PCMGGetSmootherUp(), PCMGGetSmootherDown(), 1464 PCMGSetCyclesOnLevel(), PCMGSetRhs(), PCMGSetX(), PCMGSetR() 1465 M*/ 1466 EXTERN_C_BEGIN 1467 #undef __FUNCT__ 1468 #define __FUNCT__ "PCCreate_GAMG" 1469 PetscErrorCode PCCreate_GAMG( PC pc ) 1470 { 1471 PetscErrorCode ierr; 1472 PC_GAMG *pc_gamg; 1473 PC_MG *mg; 1474 #if defined PETSC_GAMG_USE_LOG 1475 static long count = 0; 1476 #endif 1477 1478 PetscFunctionBegin; 1479 1480 /* PCGAMG is an inherited class of PCMG. Initialize pc as PCMG */ 1481 ierr = PCSetType( pc, PCMG ); CHKERRQ(ierr); /* calls PCCreate_MG() and MGCreate_Private() */ 1482 ierr = PetscObjectChangeTypeName( (PetscObject)pc, PCGAMG ); CHKERRQ(ierr); 1483 1484 /* create a supporting struct and attach it to pc */ 1485 ierr = PetscNewLog( pc, PC_GAMG, &pc_gamg ); CHKERRQ(ierr); 1486 mg = (PC_MG*)pc->data; 1487 mg->galerkin = 2; /* Use Galerkin, but it is computed externally */ 1488 mg->innerctx = pc_gamg; 1489 1490 pc_gamg->setup_count = 0; 1491 /* these should be in subctx but repartitioning needs simple arrays */ 1492 pc_gamg->data_sz = 0; 1493 pc_gamg->data = 0; 1494 1495 /* register AMG type */ 1496 if ( !GAMGList ){ 1497 ierr = PetscFListAdd(&GAMGList,GAMGGEO,"PCCreateGAMG_GEO",(void(*)(void))PCCreateGAMG_GEO);CHKERRQ(ierr); 1498 ierr = PetscFListAdd(&GAMGList,GAMGAGG,"PCCreateGAMG_AGG",(void(*)(void))PCCreateGAMG_AGG);CHKERRQ(ierr); 1499 } 1500 1501 /* overwrite the pointers of PCMG by the functions of base class PCGAMG */ 1502 pc->ops->setfromoptions = PCSetFromOptions_GAMG; 1503 pc->ops->setup = PCSetUp_GAMG; 1504 pc->ops->reset = PCReset_GAMG; 1505 pc->ops->destroy = PCDestroy_GAMG; 1506 1507 ierr = PetscObjectComposeFunctionDynamic( (PetscObject)pc, 1508 "PCGAMGSetProcEqLim_C", 1509 "PCGAMGSetProcEqLim_GAMG", 1510 PCGAMGSetProcEqLim_GAMG); 1511 CHKERRQ(ierr); 1512 1513 ierr = PetscObjectComposeFunctionDynamic( (PetscObject)pc, 1514 "PCGAMGSetCoarseEqLim_C", 1515 "PCGAMGSetCoarseEqLim_GAMG", 1516 PCGAMGSetCoarseEqLim_GAMG); 1517 CHKERRQ(ierr); 1518 1519 ierr = PetscObjectComposeFunctionDynamic( (PetscObject)pc, 1520 "PCGAMGSetRepartitioning_C", 1521 "PCGAMGSetRepartitioning_GAMG", 1522 PCGAMGSetRepartitioning_GAMG); 1523 CHKERRQ(ierr); 1524 1525 ierr = PetscObjectComposeFunctionDynamic( (PetscObject)pc, 1526 "PCGAMGSetUseASMAggs_C", 1527 "PCGAMGSetUseASMAggs_GAMG", 1528 PCGAMGSetUseASMAggs_GAMG); 1529 CHKERRQ(ierr); 1530 1531 ierr = PetscObjectComposeFunctionDynamic( 
(PetscObject)pc, 1532 "PCGAMGSetThreshold_C", 1533 "PCGAMGSetThreshold_GAMG", 1534 PCGAMGSetThreshold_GAMG); 1535 CHKERRQ(ierr); 1536 1537 ierr = PetscObjectComposeFunctionDynamic( (PetscObject)pc, 1538 "PCGAMGSetType_C", 1539 "PCGAMGSetType_GAMG", 1540 PCGAMGSetType_GAMG); 1541 CHKERRQ(ierr); 1542 1543 pc_gamg->repart = PETSC_FALSE; 1544 pc_gamg->use_aggs_in_gasm = PETSC_FALSE; 1545 pc_gamg->min_eq_proc = 100; 1546 pc_gamg->coarse_eq_limit = 800; 1547 pc_gamg->threshold = 0.001; 1548 pc_gamg->Nlevels = GAMG_MAXLEVELS; 1549 pc_gamg->verbose = 0; 1550 pc_gamg->emax_id = -1; 1551 pc_gamg->eigtarget[0] = 0.05; 1552 pc_gamg->eigtarget[1] = 1.05; 1553 1554 /* private events */ 1555 #if defined PETSC_GAMG_USE_LOG 1556 if ( count++ == 0 ) { 1557 PetscLogEventRegister("GAMG: createProl", PC_CLASSID, &petsc_gamg_setup_events[SET1]); 1558 PetscLogEventRegister(" Graph", PC_CLASSID, &petsc_gamg_setup_events[GRAPH]); 1559 /* PetscLogEventRegister(" G.Mat", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_MAT]); */ 1560 /* PetscLogEventRegister(" G.Filter", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_FILTER]); */ 1561 /* PetscLogEventRegister(" G.Square", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_SQR]); */ 1562 PetscLogEventRegister(" MIS/Agg", PC_CLASSID, &petsc_gamg_setup_events[SET4]); 1563 PetscLogEventRegister(" geo: growSupp", PC_CLASSID, &petsc_gamg_setup_events[SET5]); 1564 PetscLogEventRegister(" geo: triangle", PC_CLASSID, &petsc_gamg_setup_events[SET6]); 1565 PetscLogEventRegister(" search&set", PC_CLASSID, &petsc_gamg_setup_events[FIND_V]); 1566 PetscLogEventRegister(" SA: col data", PC_CLASSID, &petsc_gamg_setup_events[SET7]); 1567 PetscLogEventRegister(" SA: frmProl0", PC_CLASSID, &petsc_gamg_setup_events[SET8]); 1568 PetscLogEventRegister(" SA: smooth", PC_CLASSID, &petsc_gamg_setup_events[SET9]); 1569 PetscLogEventRegister("GAMG: partLevel", PC_CLASSID, &petsc_gamg_setup_events[SET2]); 1570 PetscLogEventRegister(" repartition", PC_CLASSID, &petsc_gamg_setup_events[SET12]); 1571 PetscLogEventRegister(" Invert-Sort", PC_CLASSID, &petsc_gamg_setup_events[SET13]); 1572 PetscLogEventRegister(" Move A", PC_CLASSID, &petsc_gamg_setup_events[SET14]); 1573 PetscLogEventRegister(" Move P", PC_CLASSID, &petsc_gamg_setup_events[SET15]); 1574 1575 /* PetscLogEventRegister(" PL move data", PC_CLASSID, &petsc_gamg_setup_events[SET13]); */ 1576 /* PetscLogEventRegister("GAMG: fix", PC_CLASSID, &petsc_gamg_setup_events[SET10]); */ 1577 /* PetscLogEventRegister("GAMG: set levels", PC_CLASSID, &petsc_gamg_setup_events[SET11]); */ 1578 /* create timer stages */ 1579 #if defined GAMG_STAGES 1580 { 1581 char str[32]; 1582 sprintf(str,"MG Level %d (finest)",0); 1583 PetscLogStageRegister(str, &gamg_stages[0]); 1584 PetscInt lidx; 1585 for (lidx=1;lidx<9;lidx++){ 1586 sprintf(str,"MG Level %d",lidx); 1587 PetscLogStageRegister(str, &gamg_stages[lidx]); 1588 } 1589 } 1590 #endif 1591 } 1592 #endif 1593 /* general events */ 1594 #if defined PETSC_USE_LOG 1595 PetscLogEventRegister("PCGAMGgraph_AGG", 0, &PC_GAMGGgraph_AGG); 1596 PetscLogEventRegister("PCGAMGgraph_GEO", PC_CLASSID, &PC_GAMGGgraph_GEO); 1597 PetscLogEventRegister("PCGAMGcoarse_AGG", PC_CLASSID, &PC_GAMGCoarsen_AGG); 1598 PetscLogEventRegister("PCGAMGcoarse_GEO", PC_CLASSID, &PC_GAMGCoarsen_GEO); 1599 PetscLogEventRegister("PCGAMGProl_AGG", PC_CLASSID, &PC_GAMGProlongator_AGG); 1600 PetscLogEventRegister("PCGAMGProl_GEO", PC_CLASSID, &PC_GAMGProlongator_GEO); 1601 PetscLogEventRegister("PCGAMGPOpt_AGG", PC_CLASSID, &PC_GAMGOptprol_AGG); 1602 
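  /* event for the KKT (Stokes) constraint-block prolongator; that path is enabled
     with -pc_fieldsplit_detect_saddle_point (see PCSetUp_GAMG above) */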
PetscLogEventRegister("GAMGKKTProl_AGG", PC_CLASSID, &PC_GAMGKKTProl_AGG); 1603 #endif 1604 1605 /* instantiate derived type */ 1606 ierr = PetscOptionsHead("GAMG options"); CHKERRQ(ierr); 1607 { 1608 char tname[256] = GAMGAGG; 1609 ierr = PetscOptionsList("-pc_gamg_type","Type of GAMG method","PCGAMGSetType", 1610 GAMGList, tname, tname, sizeof(tname), PETSC_NULL ); 1611 CHKERRQ(ierr); 1612 ierr = PCGAMGSetType( pc, tname ); CHKERRQ(ierr); 1613 } 1614 ierr = PetscOptionsTail(); CHKERRQ(ierr); 1615 1616 PetscFunctionReturn(0); 1617 } 1618 EXTERN_C_END 1619