/*
   GAMG geometric-algebraic multigrid PC - Mark Adams 2011
*/
#include "private/matimpl.h"
#include <../src/ksp/pc/impls/gamg/gamg.h>        /*I "petscpc.h" I*/
#include <private/kspimpl.h>

#if defined PETSC_USE_LOG
PetscLogEvent gamg_setup_events[NUM_SET];
#endif
#define GAMG_MAXLEVELS 30

/*#define GAMG_STAGES*/
#if (defined PETSC_USE_LOG && defined GAMG_STAGES)
static PetscLogStage gamg_stages[GAMG_MAXLEVELS];
#endif

/* typedef enum { NOT_DONE=-2, DELETED=-1, REMOVED=-3 } NState; */
/* use int instead of enum to facilitate passing them via Scatters */
typedef int NState;
static const NState NOT_DONE=-2;
static const NState DELETED=-1;
static const NState REMOVED=-3;

#define IS_SELECTED(s) (s!=DELETED && s!=NOT_DONE && s!=REMOVED)

/* qsort comparator: ascending node degree */
int compare (const void *a, const void *b)
{
  return (((GAMGNode*)a)->degree - ((GAMGNode*)b)->degree);
}

static PetscFList GAMGList = 0;

/* -------------------------------------------------------------------------- */
/*
   createGraph

   Input Parameter:
   . pc - this
   . Amat - original matrix
   Output Parameter:
   . a_Gmat - filtered graph
   . a_AuxMat - filtered matrix for when 'square' is used
   . a_permIS - permutation IS (this should not be here)
*/
#undef __FUNCT__
#define __FUNCT__ "createGraph"
PetscErrorCode createGraph(PC pc, const Mat Amat, Mat *a_Gmat, Mat *a_AuxMat, IS *a_permIS )
{
  PC_MG          *mg = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
  const PetscInt  verbose = pc_gamg->verbose;
  const PetscReal vfilter = pc_gamg->threshold;
  PetscErrorCode  ierr;
  PetscInt        Istart,Iend,Ii,jj,ncols,nloc,kk,nnz0,nnz1,NN,MM,bs;
  PetscMPIInt     mype, npe;
  MPI_Comm        wcomm = ((PetscObject)Amat)->comm;
  PetscInt       *d_nnz, *o_nnz;
  Mat             Gmat;
  const PetscScalar *vals;
  const PetscInt *idx;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(wcomm,&mype);CHKERRQ(ierr);
  ierr = MPI_Comm_size(wcomm,&npe);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange( Amat, &Istart, &Iend ); CHKERRQ(ierr);
  ierr = MatGetSize( Amat, &MM, &NN ); CHKERRQ(ierr);
  ierr = MatGetBlockSize( Amat, &bs ); CHKERRQ(ierr);
  nloc = (Iend-Istart)/bs;

#if defined PETSC_USE_LOG
  ierr = PetscLogEventBegin(gamg_setup_events[GRAPH],0,0,0,0);CHKERRQ(ierr);
  ierr = PetscLogEventBegin(gamg_setup_events[GRAPH_MAT],0,0,0,0);CHKERRQ(ierr);
#endif
  /* count nnz -- block rows may be sparser than this, so these are pessimistic upper bounds */
  ierr = PetscMalloc( nloc*sizeof(PetscInt), &d_nnz ); CHKERRQ(ierr);
  ierr = PetscMalloc( nloc*sizeof(PetscInt), &o_nnz ); CHKERRQ(ierr);
  for ( Ii = Istart, nnz0 = jj = 0 ; Ii < Iend ; Ii += bs, jj++ ) {
    ierr = MatGetRow(Amat,Ii,&ncols,0,0); CHKERRQ(ierr);
    d_nnz[jj] = ncols; /* very pessimistic */
    o_nnz[jj] = ncols;
    if( d_nnz[jj] > nloc ) d_nnz[jj] = nloc;
    if( o_nnz[jj] > (NN/bs-nloc) ) o_nnz[jj] = NN/bs-nloc;
    nnz0 += ncols;
    ierr = MatRestoreRow(Amat,Ii,&ncols,0,0); CHKERRQ(ierr);
  }
  nnz0 /= (nloc+1);

  /* get scalar copy (norms) of matrix */
  ierr = MatCreateMPIAIJ( wcomm, nloc, nloc,
                          PETSC_DETERMINE, PETSC_DETERMINE,
                          0, d_nnz, 0, o_nnz, &Gmat );
  CHKERRQ(ierr);

  for( Ii = Istart; Ii < Iend ; Ii++ ) {
    PetscInt dest_row = Ii/bs;
    ierr = MatGetRow(Amat,Ii,&ncols,&idx,&vals); CHKERRQ(ierr);
    for(jj=0;jj<ncols;jj++){
      PetscInt dest_col = idx[jj]/bs;
      PetscScalar sv = PetscAbs(PetscRealPart(vals[jj]));
      ierr = MatSetValues(Gmat,1,&dest_row,1,&dest_col,&sv,ADD_VALUES); CHKERRQ(ierr);
    }
    ierr = MatRestoreRow(Amat,Ii,&ncols,&idx,&vals); CHKERRQ(ierr);
  }
  ierr = MatAssemblyBegin(Gmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(Gmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  /* scale Gmat so the filter works */
  {
    Vec diag;
    ierr = MatGetVecs( Gmat, &diag, 0 ); CHKERRQ(ierr);
    ierr = MatGetDiagonal( Gmat, diag ); CHKERRQ(ierr);
    ierr = VecReciprocal( diag ); CHKERRQ(ierr);
    ierr = VecSqrtAbs( diag ); CHKERRQ(ierr);
    ierr = MatDiagonalScale( Gmat, diag, diag );CHKERRQ(ierr);
    ierr = VecDestroy( &diag ); CHKERRQ(ierr);
  }
#if defined PETSC_USE_LOG
  ierr = PetscLogEventEnd(gamg_setup_events[GRAPH_MAT],0,0,0,0); CHKERRQ(ierr);
  ierr = PetscLogEventBegin(gamg_setup_events[GRAPH_FILTER],0,0,0,0);CHKERRQ(ierr);
#endif

  ierr = MatGetOwnershipRange(Gmat,&Istart,&Iend);CHKERRQ(ierr); /* use AIJ from here */
  /* filter out weak edges */
  {
    Mat Gmat2;
    ierr = MatCreateMPIAIJ(wcomm,nloc,nloc,PETSC_DECIDE,PETSC_DECIDE,0,d_nnz,0,o_nnz,&Gmat2);
    CHKERRQ(ierr);
    for( Ii = Istart, nnz1 = 0 ; Ii < Iend; Ii++ ){
      ierr = MatGetRow(Gmat,Ii,&ncols,&idx,&vals); CHKERRQ(ierr);
      for(jj=0;jj<ncols;jj++){
        PetscScalar sv = PetscAbs(PetscRealPart(vals[jj]));
        if( PetscRealPart(sv) > vfilter ) {
          ierr = MatSetValues(Gmat2,1,&Ii,1,&idx[jj],&sv,INSERT_VALUES); CHKERRQ(ierr);
          nnz1++;
        }
      }
      ierr = MatRestoreRow(Gmat,Ii,&ncols,&idx,&vals); CHKERRQ(ierr);
    }
    nnz1 /= (nloc+1);
    ierr = MatAssemblyBegin(Gmat2,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(Gmat2,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatDestroy( &Gmat ); CHKERRQ(ierr);
    Gmat = Gmat2;
    if( verbose ) {
      PetscPrintf(PETSC_COMM_WORLD,"\t%s ave nnz/row %d --> %d\n",__FUNCT__,nnz0,nnz1);
    }
  }
  ierr = PetscFree( d_nnz ); CHKERRQ(ierr);
  ierr = PetscFree( o_nnz ); CHKERRQ(ierr);

#if defined PETSC_USE_LOG
  ierr = PetscLogEventEnd(gamg_setup_events[GRAPH_FILTER],0,0,0,0); CHKERRQ(ierr);
  ierr = PetscLogEventBegin(gamg_setup_events[GRAPH_SQR],0,0,0,0);CHKERRQ(ierr);
#endif
  /* square matrix - SA */
  if( a_AuxMat ){
    Mat Gmat2;
    ierr = MatTransposeMatMult( Gmat, Gmat, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &Gmat2 );
    CHKERRQ(ierr);
    *a_AuxMat = Gmat;
    /* force compressed row storage for B matrix in AuxMat */
    if (npe > 1) {
      Mat_MPIAIJ *mpimat = (Mat_MPIAIJ*)Gmat->data;
      Mat_SeqAIJ *Bmat = (Mat_SeqAIJ*) mpimat->B->data;
      Bmat->compressedrow.check = PETSC_TRUE;
      ierr = MatCheckCompressedRow(mpimat->B,&Bmat->compressedrow,Bmat->i,Gmat->rmap->n,-1.0);
      CHKERRQ(ierr);
      assert( Bmat->compressedrow.use );
    }
    Gmat = Gmat2; /* now work with the squared matrix (get forced soon) */
  }

#if defined PETSC_USE_LOG
  ierr = PetscLogEventEnd(gamg_setup_events[GRAPH_SQR],0,0,0,0); CHKERRQ(ierr);
#endif
  if (npe > 1) {
    /* force compressed row storage for B matrix */
    Mat_MPIAIJ *mpimat = (Mat_MPIAIJ*)Gmat->data;
    Mat_SeqAIJ *Bmat = (Mat_SeqAIJ*) mpimat->B->data;
    Bmat->compressedrow.check = PETSC_TRUE;
    ierr = MatCheckCompressedRow(mpimat->B,&Bmat->compressedrow,Bmat->i,Gmat->rmap->n,-1.0);
    CHKERRQ(ierr);
    assert( Bmat->compressedrow.use );
  }

  /* create random permutation with sort for geo-mg -- this should be refactored, it's sort of geo-mg specific */
  {
    GAMGNode *gnodes;
    PetscInt *permute;

    ierr = PetscMalloc( nloc*sizeof(GAMGNode), &gnodes ); CHKERRQ(ierr);
    ierr = PetscMalloc( nloc*sizeof(PetscInt), &permute ); CHKERRQ(ierr);
    for (Ii=Istart; Ii<Iend; Ii++) { /* locals only? */
      ierr = MatGetRow(Gmat,Ii,&ncols,0,0); CHKERRQ(ierr);
      {
        PetscInt lid = Ii - Istart;
        gnodes[lid].lid = lid;
        gnodes[lid].degree = ncols;
      }
      ierr = MatRestoreRow(Gmat,Ii,&ncols,0,0); CHKERRQ(ierr);
    }
    /* randomize */
    srand(1); /* make deterministic */
    if( PETSC_TRUE ) {
      PetscBool *bIndexSet;
      ierr = PetscMalloc( nloc*sizeof(PetscBool), &bIndexSet ); CHKERRQ(ierr);
      for ( Ii = 0; Ii < nloc ; Ii++) bIndexSet[Ii] = PETSC_FALSE;
      for ( Ii = 0; Ii < nloc ; Ii++)
      {
        PetscInt iSwapIndex = rand()%nloc;
        if (!bIndexSet[iSwapIndex] && iSwapIndex != Ii)
        {
          GAMGNode iTemp = gnodes[iSwapIndex];
          gnodes[iSwapIndex] = gnodes[Ii];
          gnodes[Ii] = iTemp;
          bIndexSet[Ii] = PETSC_TRUE;
          bIndexSet[iSwapIndex] = PETSC_TRUE;
        }
      }
      ierr = PetscFree( bIndexSet ); CHKERRQ(ierr);
    }
    /* only sort locals */
    qsort( gnodes, nloc, sizeof(GAMGNode), compare );
    /* create IS of permutation */
    for(kk=0;kk<nloc;kk++) { /* locals only */
      permute[kk] = gnodes[kk].lid;
    }
    ierr = ISCreateGeneral( PETSC_COMM_SELF, (Iend-Istart), permute, PETSC_COPY_VALUES, a_permIS );
    CHKERRQ(ierr);

    ierr = PetscFree( gnodes ); CHKERRQ(ierr);
    ierr = PetscFree( permute ); CHKERRQ(ierr);
  }
#if defined PETSC_USE_LOG
  ierr = PetscLogEventEnd(gamg_setup_events[GRAPH],0,0,0,0); CHKERRQ(ierr);
#endif
  *a_Gmat = Gmat;

  PetscFunctionReturn(0);
}
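/*
   A note on the filter in createGraph (a sketch of the semantics, nothing new):
   after the symmetric scaling Gmat <- D^{-1/2} Gmat D^{-1/2}, with
   D = diag(Gmat), entry (i,j) holds g_ij/sqrt(g_ii*g_jj), so the test
   'sv > vfilter' keeps an edge only when the coupling is strong relative to
   both diagonals.  Illustrative numbers for block size 1: g_ij = 0.1,
   g_ii = 4, g_jj = 1 gives 0.1/sqrt(4*1) = 0.05, which a threshold of 0.05
   drops (the test is strictly greater-than).
*/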
/* -------------------------------------------------------------------------- */
/*
   getDataWithGhosts - hacks into Mat MPIAIJ so this must have > 1 pe

   Input Parameter:
   . Gmat - MPIAIJ matrix for scatters
   . data_sz - number of data terms per node (# cols in output)
   . data_in[nloc*data_sz] - column-oriented data
   Output Parameter:
   . a_stride - number of rows of output
   . a_data_out[stride*data_sz] - output data with ghosts
*/
#undef __FUNCT__
#define __FUNCT__ "getDataWithGhosts"
PetscErrorCode getDataWithGhosts( const Mat Gmat,
                                  const PetscInt data_sz,
                                  const PetscReal data_in[],
                                  PetscInt *a_stride,
                                  PetscReal **a_data_out
                                  )
{
  PetscErrorCode ierr;
  PetscMPIInt    mype,npe;
  MPI_Comm       wcomm = ((PetscObject)Gmat)->comm;
  Vec            tmp_crds;
  Mat_MPIAIJ    *mpimat = (Mat_MPIAIJ*)Gmat->data;
  PetscInt       nnodes,num_ghosts,dir,kk,jj,my0,Iend,nloc;
  PetscScalar   *data_arr;
  PetscReal     *datas;
  PetscBool      isMPIAIJ;

  PetscFunctionBegin;
  ierr = PetscTypeCompare( (PetscObject)Gmat, MATMPIAIJ, &isMPIAIJ ); CHKERRQ(ierr);
  assert(isMPIAIJ);
  ierr = MPI_Comm_rank(wcomm,&mype);CHKERRQ(ierr);
  ierr = MPI_Comm_size(wcomm,&npe);CHKERRQ(ierr); assert(npe>1);
  ierr = MatGetOwnershipRange( Gmat, &my0, &Iend ); CHKERRQ(ierr);
  nloc = Iend - my0;
  ierr = VecGetLocalSize( mpimat->lvec, &num_ghosts ); CHKERRQ(ierr);
  nnodes = num_ghosts + nloc;
  *a_stride = nnodes;
  ierr = MatGetVecs( Gmat, &tmp_crds, 0 ); CHKERRQ(ierr);

  ierr = PetscMalloc( data_sz*nnodes*sizeof(PetscReal), &datas); CHKERRQ(ierr);
  for(dir=0; dir<data_sz; dir++) {
    /* set local, and global */
    for(kk=0; kk<nloc; kk++) {
      PetscInt gid = my0 + kk;
      PetscScalar crd = (PetscScalar)data_in[dir*nloc + kk]; /* col oriented */
      datas[dir*nnodes + kk] = PetscRealPart(crd);
      ierr = VecSetValues(tmp_crds, 1, &gid, &crd, INSERT_VALUES ); CHKERRQ(ierr);
    }
    ierr = VecAssemblyBegin( tmp_crds ); CHKERRQ(ierr);
    ierr = VecAssemblyEnd( tmp_crds ); CHKERRQ(ierr);
    /* get ghost data */
    ierr = VecScatterBegin(mpimat->Mvctx,tmp_crds,mpimat->lvec,INSERT_VALUES,SCATTER_FORWARD);
    CHKERRQ(ierr);
    ierr = VecScatterEnd(mpimat->Mvctx,tmp_crds,mpimat->lvec,INSERT_VALUES,SCATTER_FORWARD);
    CHKERRQ(ierr);
    ierr = VecGetArray( mpimat->lvec, &data_arr ); CHKERRQ(ierr);
    for(kk=nloc,jj=0;jj<num_ghosts;kk++,jj++){
      datas[dir*nnodes + kk] = PetscRealPart(data_arr[jj]);
    }
    ierr = VecRestoreArray( mpimat->lvec, &data_arr ); CHKERRQ(ierr);
  }
  ierr = VecDestroy(&tmp_crds); CHKERRQ(ierr);

  *a_data_out = datas;

  PetscFunctionReturn(0);
}
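/*
   Usage sketch for getDataWithGhosts (illustrative only; 'ndim' and 'coords'
   are hypothetical caller variables): gather column-oriented 2D/3D coordinates
   so that ghost rows of the graph have coordinates too.

     PetscInt  stride;              set to nloc + num_ghosts on return
     PetscReal *crds_w_ghosts;      [stride*ndim], column oriented
     ierr = getDataWithGhosts( Gmat, ndim, coords, &stride, &crds_w_ghosts ); CHKERRQ(ierr);
     ... ghost node j lives at row nloc+j of each column ...
     ierr = PetscFree( crds_w_ghosts ); CHKERRQ(ierr);

   The output array is allocated by the routine, so the caller frees it.
*/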
/* -------------------------------------------------------------------------- */
/*
   smoothAggs -

   Input Parameter:
   . Gmat_2 - global matrix of graph (data not defined)
   . Gmat_1
   . lid_state
   Input/Output Parameter:
   . id_llist - linked list rooted at selected node (size is nloc + nghosts_2)
   . deleted_parent_gid
*/
#undef __FUNCT__
#define __FUNCT__ "smoothAggs"
PetscErrorCode smoothAggs( const Mat Gmat_2, /* base (squared) graph */
                           const Mat Gmat_1, /* base graph, could be unsymmetric */
                           const PetscScalar lid_state[], /* [nloc], states */
                           PetscInt id_llist[], /* [nloc+nghost_2], aggregate list */
                           PetscScalar deleted_parent_gid[] /* [nloc], which pe owns my deleted */
                           )
{
  PetscErrorCode ierr;
  PetscBool      isMPI;
  Mat_SeqAIJ    *matA_1, *matB_1=0;
  MPI_Comm       wcomm = ((PetscObject)Gmat_2)->comm;
  PetscMPIInt    mype;
  PetscInt       nghosts_2,nghosts_1,lid,*ii,n,*idx,j,ix,Iend,my0;
  Mat_MPIAIJ    *mpimat_2 = 0, *mpimat_1=0;
  const PetscInt nloc = Gmat_2->rmap->n;
  PetscScalar   *cpcol_1_state;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank( wcomm, &mype ); CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(Gmat_1,&my0,&Iend); CHKERRQ(ierr);

  if( !PETSC_TRUE ) { /* debug: dump Gmat_1 in MATLAB format */
    PetscViewer viewer; char fname[32]; static int llev=0;
    sprintf(fname,"Gmat1_%d.mat",llev++);
    PetscViewerASCIIOpen(wcomm,fname,&viewer);
    ierr = PetscViewerSetFormat( viewer, PETSC_VIEWER_ASCII_MATLAB); CHKERRQ(ierr);
    ierr = MatView(Gmat_1, viewer ); CHKERRQ(ierr);
    ierr = PetscViewerDestroy( &viewer ); CHKERRQ(ierr);
  }

  /* get submatrices */
  ierr = PetscTypeCompare( (PetscObject)Gmat_1, MATMPIAIJ, &isMPI ); CHKERRQ(ierr);

  if (isMPI) {
    PetscInt    *gids, gid;
    PetscScalar *real_gids;
    Vec          tempVec;

    ierr = PetscMalloc( nloc*sizeof(PetscInt), &gids ); CHKERRQ(ierr);
    ierr = PetscMalloc( nloc*sizeof(PetscScalar), &real_gids ); CHKERRQ(ierr);

    for(lid=0,gid=my0;lid<nloc;lid++,gid++){
      gids[lid] = gid;
      real_gids[lid] = (PetscScalar)gid;
    }
    /* grab matrix objects */
    mpimat_2 = (Mat_MPIAIJ*)Gmat_2->data;
    mpimat_1 = (Mat_MPIAIJ*)Gmat_1->data;
    matA_1 = (Mat_SeqAIJ*)mpimat_1->A->data;
    matB_1 = (Mat_SeqAIJ*)mpimat_1->B->data;
    /* get ghost sizes */
    ierr = VecGetLocalSize( mpimat_1->lvec, &nghosts_1 ); CHKERRQ(ierr);
    ierr = VecGetLocalSize( mpimat_2->lvec, &nghosts_2 ); CHKERRQ(ierr);
    /* get 'cpcol_1_state' */
    ierr = MatGetVecs( Gmat_1, &tempVec, 0 ); CHKERRQ(ierr);
    if(nloc>0){
      ierr = VecSetValues( tempVec, nloc, gids, lid_state, INSERT_VALUES ); CHKERRQ(ierr);
    }
    ierr = VecAssemblyBegin( tempVec ); CHKERRQ(ierr);
    ierr = VecAssemblyEnd( tempVec ); CHKERRQ(ierr);
    ierr = VecScatterBegin(mpimat_1->Mvctx,tempVec, mpimat_1->lvec,INSERT_VALUES,SCATTER_FORWARD);
    CHKERRQ(ierr);
    ierr = VecScatterEnd(mpimat_1->Mvctx,tempVec, mpimat_1->lvec,INSERT_VALUES,SCATTER_FORWARD);
    CHKERRQ(ierr);
    ierr = VecGetArray( mpimat_1->lvec, &cpcol_1_state ); CHKERRQ(ierr);
    ierr = VecDestroy( &tempVec ); CHKERRQ(ierr);

    ierr = PetscFree( gids ); CHKERRQ(ierr);
    ierr = PetscFree( real_gids ); CHKERRQ(ierr);
  } else {
    PetscBool isAIJ;
    ierr = PetscTypeCompare( (PetscObject)Gmat_2, MATSEQAIJ, &isAIJ ); CHKERRQ(ierr);
    assert(isAIJ);
    matA_1 = (Mat_SeqAIJ*)Gmat_1->data;
    nghosts_2 = nghosts_1 = 0;
  }
  assert( matA_1 && !matA_1->compressedrow.use );
  assert( matB_1==0 || matB_1->compressedrow.use );

  {
    PetscInt *lid_cprowID_1;
    PetscInt *lid_sel_lid;

    ierr = PetscMalloc( nloc*sizeof(PetscInt), &lid_cprowID_1 ); CHKERRQ(ierr);
    ierr = PetscMalloc( nloc*sizeof(PetscInt), &lid_sel_lid ); CHKERRQ(ierr);

    /* reverse map to selected node */
    for(lid=0;lid<nloc;lid++) lid_cprowID_1[lid] = lid_sel_lid[lid] = -1;
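    /*
       A sketch of the reverse map built below (illustrative numbers): if local
       node 7 is SELECTED and its aggregate list is 7 -> 3 -> 9 (id_llist[7]=3,
       id_llist[3]=9, id_llist[9]=-1), then after the walk lid_sel_lid[7] =
       lid_sel_lid[3] = lid_sel_lid[9] = 7, i.e. every member points back to
       the selected root of its aggregate.
    */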
    for(lid=0;lid<nloc;lid++){
      NState state = (NState)PetscRealPart(lid_state[lid]);
      if( IS_SELECTED(state) ){
        PetscInt flid = lid;
        do{
          lid_sel_lid[flid] = lid; assert(flid<nloc);
        } while( (flid=id_llist[flid]) != -1 );
      }
    }

    /* set index into compressed row 'lid_cprowID' */
    if( matB_1 ) {
      ii = matB_1->compressedrow.i;
      for (ix=0; ix<matB_1->compressedrow.nrows; ix++) {
        PetscInt lid = matB_1->compressedrow.rindex[ix];
        lid_cprowID_1[lid] = ix;
      }
    }

    for(lid=0;lid<nloc;lid++){
      NState state = (NState)PetscRealPart(lid_state[lid]);
      if( IS_SELECTED(state) ) {
        /* steal locals */
        ii = matA_1->i; n = ii[lid+1] - ii[lid];
        idx = matA_1->j + ii[lid];
        for (j=0; j<n; j++) {
          PetscInt flid, lidj = idx[j];
          NState statej = (NState)PetscRealPart(lid_state[lidj]);
          if( statej==DELETED && lid_sel_lid[lidj] != lid ){ /* steal it */
            if( lid_sel_lid[lidj] != -1 ){
              /* I'm stealing this local from another local aggregate */
              PetscInt hav=0, flid2 = lid_sel_lid[lidj], lastid; assert(flid2!=-1);
              for( lastid=flid2, flid=id_llist[flid2] ; flid!=-1 ; flid=id_llist[flid] ) {
                if( flid == lidj ) {
                  id_llist[lastid] = id_llist[lidj];                    /* remove lidj from list */
                  id_llist[lidj] = id_llist[lid]; id_llist[lid] = lidj; /* insert 'lidj' into head of llist */
                  hav++;
                  /* break; */
                }
                lastid = flid;
              }
              if(hav!=1){
                flid2 = lid_sel_lid[lidj];
                if(hav!=0){
                  SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_LIB,
                           "found %d vertices. (if %d==0) failed to find self in 'selected' lists. probably structurally unsymmetric matrix",
                           hav,hav);
                }
              }
            }
            else{
              /* I'm stealing this local, owned by a ghost */
              deleted_parent_gid[lidj] = (PetscScalar)(lid+my0); /* this makes it a no-op later */
              lid_sel_lid[lidj] = lid; /* not needed */
              id_llist[lidj] = id_llist[lid]; id_llist[lid] = lidj; /* insert 'lidj' into head of llist */
            }
          }
        }
        /* ghosts are done by 'DELETED' branch */
      }
      else if( state == DELETED ) {
        /* see if I have a selected ghost neighbor */
        if( (ix=lid_cprowID_1[lid]) != -1 ) {
          PetscInt hav = 0, old_sel_lid = lid_sel_lid[lid], lastid; assert(old_sel_lid<nloc);
          ii = matB_1->compressedrow.i; n = ii[ix+1] - ii[ix];
          idx = matB_1->j + ii[ix];
          for( j=0 ; j<n ; j++ ) {
            PetscInt cpid = idx[j];
            NState statej = (NState)PetscRealPart(cpcol_1_state[cpid]);
            if( IS_SELECTED(statej) ) {
              PetscInt new_sel_gid = (PetscInt)statej, hv=0, flid;
              hav++;
              /* remove from list */
              if( old_sel_lid != -1 ) {
                /* assert(deleted_parent_gid[lid]==-1.0); */
                for( lastid=old_sel_lid, flid=id_llist[old_sel_lid] ; flid != -1 ; flid=id_llist[flid] ) {
                  if( flid == lid ) {
                    id_llist[lastid] = id_llist[lid]; /* remove lid from 'old_sel_lid' list */
                    hv++;
                    break;
                  }
                  lastid = flid;
                }
                /* assert(hv==1); */
              }
              else {
                assert(deleted_parent_gid[lid] != -1.0); /* stealing from one ghost, giving to another */
              }

              /* this will get the other proc to add this */
              deleted_parent_gid[lid] = (PetscScalar)new_sel_gid;
            }
          }
          assert(hav <= 1);
        }
      }
    }

    ierr = PetscFree( lid_cprowID_1 ); CHKERRQ(ierr);
    ierr = PetscFree( lid_sel_lid ); CHKERRQ(ierr);
  }

  if(isMPI) {
    ierr = VecRestoreArray( mpimat_1->lvec, &cpcol_1_state ); CHKERRQ(ierr);
  }

  PetscFunctionReturn(0);
}
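/*
   Representation note (a description of the existing idiom, not new API):
   aggregates are stored as singly linked lists threaded through the array
   id_llist[], with -1 as the terminator.  Inserting node j at the head of
   selected node s's list is the two-statement idiom used throughout:

     id_llist[j] = id_llist[s];  id_llist[s] = j;

   so an aggregate {s, j2, j1} built by inserting j1 then j2 reads
   id_llist[s]=j2, id_llist[j2]=j1, id_llist[j1]=-1.
*/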
/* -------------------------------------------------------------------------- */
/*
   maxIndSetAgg - parallel maximal independent set (MIS) with data locality info.

   Input Parameter:
   . perm - serial permutation of local rows to process in MIS
   . Gmat - global matrix of graph (data not defined)
   . Auxmat - non-squared matrix
   . strict_aggs - flag for whether to keep strict (non-overlapping) aggregates in 'llist'
   Output Parameter:
   . a_selected - IS of selected vertices, includes 'ghost' nodes at end with natural local indices
   . a_locals_llist - linked list of local nodes rooted at selected node (size is nloc + nghosts)
*/
#undef __FUNCT__
#define __FUNCT__ "maxIndSetAgg"
PetscErrorCode maxIndSetAgg( const IS perm,
                             const Mat Gmat,
                             const Mat Auxmat,
                             const PetscBool strict_aggs,
                             IS *a_selected,
                             IS *a_locals_llist
                             )
{
  PetscErrorCode ierr;
  PetscBool      isMPI;
  Mat_SeqAIJ    *matA, *matB = 0;
  MPI_Comm       wcomm = ((PetscObject)Gmat)->comm;
  Vec            locState, ghostState;
  PetscInt       num_fine_ghosts,kk,n,ix,j,*idx,*ii,iter,Iend,my0;
  Mat_MPIAIJ    *mpimat = 0;
  PetscScalar   *cpcol_gid,*cpcol_state;
  PetscMPIInt    mype;
  const PetscInt *perm_ix;
  PetscInt       nDone = 0, nselected = 0;
  const PetscInt nloc = Gmat->rmap->n;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank( wcomm, &mype ); CHKERRQ(ierr);
  /* get submatrices */
  ierr = PetscTypeCompare( (PetscObject)Gmat, MATMPIAIJ, &isMPI ); CHKERRQ(ierr);
  if (isMPI) {
    mpimat = (Mat_MPIAIJ*)Gmat->data;
    matA = (Mat_SeqAIJ*)mpimat->A->data;
    matB = (Mat_SeqAIJ*)mpimat->B->data;
  } else {
    PetscBool isAIJ;
    ierr = PetscTypeCompare( (PetscObject)Gmat, MATSEQAIJ, &isAIJ ); CHKERRQ(ierr);
    assert(isAIJ);
    matA = (Mat_SeqAIJ*)Gmat->data;
  }
  assert( matA && !matA->compressedrow.use );
  assert( matB==0 || matB->compressedrow.use );
  /* get vector */
  ierr = MatGetVecs( Gmat, &locState, 0 ); CHKERRQ(ierr);

  ierr = MatGetOwnershipRange(Gmat,&my0,&Iend); CHKERRQ(ierr);

  if( mpimat ) {
    PetscInt gid;
    for(kk=0,gid=my0;kk<nloc;kk++,gid++) {
      PetscScalar v = (PetscScalar)(gid);
      ierr = VecSetValues( locState, 1, &gid, &v, INSERT_VALUES ); CHKERRQ(ierr); /* set with GID */
    }
    ierr = VecAssemblyBegin( locState ); CHKERRQ(ierr);
    ierr = VecAssemblyEnd( locState ); CHKERRQ(ierr);
    ierr = VecScatterBegin(mpimat->Mvctx,locState,mpimat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(mpimat->Mvctx,locState,mpimat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecGetArray( mpimat->lvec, &cpcol_gid ); CHKERRQ(ierr); /* get GID (proc proxy) in 'cpcol_gid' */
    ierr = VecDuplicate( mpimat->lvec, &ghostState ); CHKERRQ(ierr); /* need 2nd compressed col. of off-proc data */
    ierr = VecGetLocalSize( mpimat->lvec, &num_fine_ghosts ); CHKERRQ(ierr);
    ierr = VecSet( ghostState, (PetscScalar)((PetscReal)NOT_DONE) ); CHKERRQ(ierr); /* set with UNKNOWN state */
  }
  else num_fine_ghosts = 0;

  { /* need an inverse map - locals */
    PetscInt    *lid_cprowID, *lid_gid;
    PetscScalar *deleted_parent_gid; /* only used for strict aggs */
    PetscInt    *id_llist; /* linked list with locality info - output */
    PetscScalar *lid_state;

    ierr = PetscMalloc( nloc*sizeof(PetscInt), &lid_cprowID ); CHKERRQ(ierr);
    ierr = PetscMalloc( (nloc+1)*sizeof(PetscInt), &lid_gid ); CHKERRQ(ierr);
    ierr = PetscMalloc( (nloc+1)*sizeof(PetscScalar), &deleted_parent_gid ); CHKERRQ(ierr);
    ierr = PetscMalloc( (nloc+num_fine_ghosts)*sizeof(PetscInt), &id_llist ); CHKERRQ(ierr);
    ierr = PetscMalloc( (nloc+1)*sizeof(PetscScalar), &lid_state ); CHKERRQ(ierr);

    for(kk=0;kk<nloc;kk++) {
      id_llist[kk] = -1; /* terminates linked lists */
      lid_cprowID[kk] = -1;
      deleted_parent_gid[kk] = -1.0;
      lid_gid[kk] = kk + my0;
      lid_state[kk] = (PetscScalar)((PetscReal)NOT_DONE);
    }
    for(ix=0;kk<nloc+num_fine_ghosts;kk++,ix++) {
      id_llist[kk] = -1; /* terminates linked lists */
    }
    /* set index into compressed row 'lid_cprowID' */
    if( matB ) {
      ii = matB->compressedrow.i;
      for (ix=0; ix<matB->compressedrow.nrows; ix++) {
        PetscInt lid = matB->compressedrow.rindex[ix];
        lid_cprowID[lid] = ix;
      }
    }
    /* MIS */
    ierr = ISGetIndices( perm, &perm_ix ); CHKERRQ(ierr);
    iter = 0;
    while ( nDone < nloc || PETSC_TRUE ) { /* asynchronous version not implemented */
      iter++;
      if( mpimat ) {
        ierr = VecGetArray( ghostState, &cpcol_state ); CHKERRQ(ierr);
      }
      /* check all vertices */
      for(kk=0;kk<nloc;kk++){
        PetscInt lid = perm_ix[kk];
        NState state = (NState)PetscRealPart(lid_state[lid]);
        if( state == NOT_DONE ) {
          /* parallel test, delete if selected ghost */
          PetscBool isOK = PETSC_TRUE;
          if( (ix=lid_cprowID[lid]) != -1 ) { /* if I have any ghost neighbors */
            ii = matB->compressedrow.i; n = ii[ix+1] - ii[ix];
            idx = matB->j + ii[ix];
            for( j=0 ; j<n ; j++ ) {
              PetscInt cpid = idx[j]; /* compressed row ID in B mat */
              PetscInt gid = (PetscInt)PetscRealPart(cpcol_gid[cpid]);
              NState statej = (NState)PetscRealPart(cpcol_state[cpid]);
              if( statej == NOT_DONE && gid >= Iend ) { /* should be (pe>mype), use gid as pe proxy */
                isOK = PETSC_FALSE; /* can not delete */
              }
              else if( IS_SELECTED(statej) ) { /* lid is now deleted, do it */
                assert(0);
              }
            }
          } /* parallel test */
          if( isOK ){ /* select or remove this vertex */
            nDone++;
            /* check for singleton */
            ii = matA->i; n = ii[lid+1] - ii[lid];
            if( n < 2 ) {
              /* if I have any ghost adj then not a singleton */
              ix = lid_cprowID[lid];
              if( ix==-1 || (matB->compressedrow.i[ix+1]-matB->compressedrow.i[ix])==0 ){
                lid_state[lid] = (PetscScalar)((PetscReal)REMOVED);
                continue; /* one local adj (me) and no ghost - singleton - flag and continue */
              }
            }
            /* SELECTED state encoded with global index */
            lid_state[lid] = (PetscScalar)(lid+my0);
            nselected++;
            /* delete local adj */
            idx = matA->j + ii[lid];
            for (j=0; j<n; j++) {
              PetscInt lidj = idx[j];
              NState statej = (NState)PetscRealPart(lid_state[lidj]);
              if( statej == NOT_DONE ){
                nDone++;
                id_llist[lidj] = id_llist[lid]; id_llist[lid] = lidj; /* insert 'lidj' into head of llist */
                lid_state[lidj] = (PetscScalar)(PetscReal)DELETED; /* delete this */
              }
            }

            /* delete ghost adj - deleted ghosts are done later for aggregation */
            if( !strict_aggs ) {
              if( (ix=lid_cprowID[lid]) != -1 ) { /* if I have any ghost neighbors */
                ii = matB->compressedrow.i; n = ii[ix+1] - ii[ix];
                idx = matB->j + ii[ix];
                for( j=0 ; j<n ; j++ ) {
                  PetscInt cpid = idx[j]; /* compressed row ID in B mat */
                  NState statej = (NState)PetscRealPart(cpcol_state[cpid]);
                  assert( !IS_SELECTED(statej) );

                  if( statej == NOT_DONE ) {
                    PetscInt lidj = nloc + cpid;
                    /* cpcol_state[cpid] = (PetscScalar)DELETED; this should happen later ... */
                    id_llist[lidj] = id_llist[lid]; id_llist[lid] = lidj; /* insert 'lidj' into head of llist */
                  }
                }
              }
            }

          } /* selected */
        } /* not-done vertex */
      } /* vertex loop */

      /* update ghost states and count todos */
      if( mpimat ) {
        PetscInt t1, t2;
        ierr = VecRestoreArray( ghostState, &cpcol_state ); CHKERRQ(ierr);
        /* put lid state in 'locState' */
        ierr = VecSetValues( locState, nloc, lid_gid, lid_state, INSERT_VALUES ); CHKERRQ(ierr);
        ierr = VecAssemblyBegin( locState ); CHKERRQ(ierr);
        ierr = VecAssemblyEnd( locState ); CHKERRQ(ierr);
        /* scatter states, check for done */
        ierr = VecScatterBegin(mpimat->Mvctx,locState,ghostState,INSERT_VALUES,SCATTER_FORWARD);
        CHKERRQ(ierr);
        ierr = VecScatterEnd(mpimat->Mvctx,locState,ghostState,INSERT_VALUES,SCATTER_FORWARD);
        CHKERRQ(ierr);
        /* delete locals from selected ghosts */
        ierr = VecGetArray( ghostState, &cpcol_state ); CHKERRQ(ierr);
        ii = matB->compressedrow.i;
        for (ix=0; ix<matB->compressedrow.nrows; ix++) {
          PetscInt lid = matB->compressedrow.rindex[ix];
          NState state = (NState)PetscRealPart(lid_state[lid]);
          if( state == NOT_DONE ) {
            /* look at ghosts */
            n = ii[ix+1] - ii[ix];
            idx = matB->j + ii[ix];
            for( j=0 ; j<n ; j++ ) {
              PetscInt cpid = idx[j]; /* compressed row ID in B mat */
              NState statej = (NState)PetscRealPart(cpcol_state[cpid]);
              if( IS_SELECTED(statej) ) { /* lid is now deleted, do it */
                PetscInt lidj = nloc + cpid;
                nDone++;
                lid_state[lid] = (PetscScalar)(PetscReal)DELETED; /* delete this */
                if( !strict_aggs ) {
                  id_llist[lid] = id_llist[lidj]; id_llist[lidj] = lid; /* insert 'lid' into head of ghost llist */
                }
                else {
                  PetscInt gid = (PetscInt)PetscRealPart(cpcol_gid[cpid]);
                  deleted_parent_gid[lid] = (PetscScalar)gid; /* keep track of proc that I belong to */
                }
                break;
              }
            }
          }
        }
        ierr = VecRestoreArray( ghostState, &cpcol_state ); CHKERRQ(ierr);
        /* all done? */
        t1 = nloc - nDone; assert(t1>=0);
        ierr = MPI_Allreduce ( &t1, &t2, 1, MPIU_INT, MPIU_SUM, wcomm ); CHKERRQ(ierr); /* synchronous version */
        if( t2 == 0 ) break;
      }
      else break; /* all done */
    } /* outer parallel MIS loop */
    ierr = ISRestoreIndices(perm,&perm_ix); CHKERRQ(ierr);

    if( mpimat ){ /* free this buffer up (not really needed here) */
      ierr = VecRestoreArray( mpimat->lvec, &cpcol_gid ); CHKERRQ(ierr);
    }

    /* adjust aggregates */
    if( strict_aggs ) {
      ierr = smoothAggs(Gmat, Auxmat, lid_state, id_llist, deleted_parent_gid);
      CHKERRQ(ierr);
    }

    /* tell adj who my deleted vertices belong to */
    if( strict_aggs && matB ) {
      PetscScalar *cpcol_sel_gid;
      PetscInt     cpid;
      /* get proc of deleted ghost */
      ierr = VecSetValues(locState, nloc, lid_gid, deleted_parent_gid, INSERT_VALUES); CHKERRQ(ierr);
      ierr = VecAssemblyBegin(locState); CHKERRQ(ierr);
      ierr = VecAssemblyEnd(locState); CHKERRQ(ierr);
      ierr = VecScatterBegin(mpimat->Mvctx,locState,mpimat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
      ierr = VecScatterEnd(mpimat->Mvctx,locState,mpimat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
      ierr = VecGetArray( mpimat->lvec, &cpcol_sel_gid ); CHKERRQ(ierr); /* has pe that owns ghost */
      for(cpid=0; cpid<num_fine_ghosts; cpid++) {
        PetscInt gid = (PetscInt)PetscRealPart(cpcol_sel_gid[cpid]);
        if( gid >= my0 && gid < Iend ) { /* I own this deleted */
          PetscInt lidj = nloc + cpid;
          PetscInt lid = gid - my0;
          id_llist[lidj] = id_llist[lid]; id_llist[lid] = lidj; /* insert 'lidj' into head of llist */
          assert(IS_SELECTED( (NState)PetscRealPart(lid_state[lid]) ));
        }
      }
      ierr = VecRestoreArray( mpimat->lvec, &cpcol_sel_gid ); CHKERRQ(ierr);
    }

    /* create output IS of aggregates in linked list */
    ierr = ISCreateGeneral(PETSC_COMM_SELF,nloc+num_fine_ghosts,id_llist,PETSC_COPY_VALUES,a_locals_llist);
    CHKERRQ(ierr);

    /* make 'a_selected' - output */
    if( mpimat ) {
      ierr = VecGetArray( ghostState, &cpcol_state ); CHKERRQ(ierr);
    }
    for (j=0; j<num_fine_ghosts; j++) {
      if( IS_SELECTED( (NState)PetscRealPart(cpcol_state[j]) ) ) nselected++;
    }
    {
      PetscInt *selected_set;
      ierr = PetscMalloc( nselected*sizeof(PetscInt), &selected_set ); CHKERRQ(ierr);
      for(kk=0,j=0;kk<nloc;kk++){
        NState state = (NState)PetscRealPart(lid_state[kk]);
        if( IS_SELECTED(state) ) {
          selected_set[j++] = kk;
        }
      }
      for (kk=0; kk<num_fine_ghosts; kk++) {
        if( IS_SELECTED( (NState)PetscRealPart(cpcol_state[kk]) ) ) {
          selected_set[j++] = nloc + kk;
        }
      }
      assert(j==nselected);
      ierr = ISCreateGeneral(PETSC_COMM_SELF, nselected, selected_set, PETSC_COPY_VALUES, a_selected );
      CHKERRQ(ierr);
      ierr = PetscFree( selected_set ); CHKERRQ(ierr);
    }
    if( mpimat ) {
      ierr = VecRestoreArray( ghostState, &cpcol_state ); CHKERRQ(ierr);
    }

    ierr = PetscFree( lid_cprowID ); CHKERRQ(ierr);
    ierr = PetscFree( lid_gid ); CHKERRQ(ierr);
    ierr = PetscFree( deleted_parent_gid ); CHKERRQ(ierr);
    ierr = PetscFree( id_llist ); CHKERRQ(ierr);
    ierr = PetscFree( lid_state ); CHKERRQ(ierr);
  } /* scoping */

  if(mpimat){
    ierr = VecDestroy( &ghostState ); CHKERRQ(ierr);
  }

  ierr = VecDestroy( &locState ); CHKERRQ(ierr);

  PetscFunctionReturn(0);
}

/* ----------------------------------------------------------------------------- */
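/*
   Usage sketch for maxIndSetAgg (illustrative; 'Gmat', 'Auxmat' and 'permIS'
   as produced by createGraph above):

     IS selected, llist;
     ierr = maxIndSetAgg( permIS, Gmat, Auxmat, PETSC_TRUE, &selected, &llist ); CHKERRQ(ierr);
     ... 'selected' holds the MIS roots (coarse points); 'llist' holds, for
     ... each root, the linked list of fine nodes aggregated to it.
     ierr = ISDestroy( &selected ); CHKERRQ(ierr);
     ierr = ISDestroy( &llist ); CHKERRQ(ierr);
*/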
#undef __FUNCT__
#define __FUNCT__ "PCReset_GAMG"
PetscErrorCode PCReset_GAMG(PC pc)
{
  PetscErrorCode ierr;
  PC_MG   *mg = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  if( pc_gamg->data != 0 ) { /* this should not happen, cleaned up in SetUp */
    ierr = PetscFree(pc_gamg->data); CHKERRQ(ierr);
  }
  pc_gamg->data = 0; pc_gamg->data_sz = 0;
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------- */
/*
   createLevel: create coarse op with RAP.  Repartition and/or reduce the
   number of active processors.

   Input Parameter:
   . pc - parameters
   . Amat_fine - matrix on this fine (k) level
   . ndata_rows - size of data to move (coarse grid)
   . ndata_cols - size of data to move (coarse grid)
   . cbs - coarse block size
   . isLast - last level flag (reduce to one process)
   In/Output Parameter:
   . a_P_inout - prolongation operator to the next level (k-1)
   . a_coarse_data - data that need to be moved
   . a_nactive_proc - number of active procs
   Output Parameter:
   . a_Amat_crs - coarse matrix that is created (k-1)
*/

#undef __FUNCT__
#define __FUNCT__ "createLevel"
PetscErrorCode createLevel( const PC pc,
                            const Mat Amat_fine,
                            const PetscInt ndata_rows,
                            const PetscInt ndata_cols,
                            const PetscInt cbs,
                            const PetscBool isLast,
                            Mat *a_P_inout,
                            PetscReal **a_coarse_data,
                            PetscMPIInt *a_nactive_proc,
                            Mat *a_Amat_crs
                            )
{
  PC_MG          *mg = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
  const PetscBool repart = pc_gamg->repart;
  const PetscInt  min_eq_proc = pc_gamg->min_eq_proc, coarse_max = pc_gamg->coarse_eq_limit;
  PetscErrorCode  ierr;
  Mat             Cmat,Pnew,Pold=*a_P_inout;
  IS              new_indices,isnum;
  MPI_Comm        wcomm = ((PetscObject)Amat_fine)->comm;
  PetscMPIInt     mype,npe,new_npe,nactive = *a_nactive_proc;
  PetscInt        neq,NN,Istart,Iend,Istart0,Iend0,ncrs_new,ncrs0,rfactor;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank( wcomm, &mype ); CHKERRQ(ierr);
  ierr = MPI_Comm_size( wcomm, &npe ); CHKERRQ(ierr);

  /* RAP */
#ifdef USE_R
  /* make R with brute force for now */
  ierr = MatTranspose( Pold, Pnew );
  ierr = MatDestroy( &Pold ); CHKERRQ(ierr);
  ierr = MatRARt( Amat_fine, Pnew, MAT_INITIAL_MATRIX, 2.0, &Cmat ); CHKERRQ(ierr);
  Pold = Pnew;
#else
  ierr = MatPtAP( Amat_fine, Pold, MAT_INITIAL_MATRIX, 2.0, &Cmat ); CHKERRQ(ierr);
#endif
  ierr = MatSetBlockSize( Cmat, cbs ); CHKERRQ(ierr);
  ierr = MatGetOwnershipRange( Cmat, &Istart0, &Iend0 ); CHKERRQ(ierr);
  ncrs0 = (Iend0-Istart0)/cbs; assert((Iend0-Istart0)%cbs == 0);

  /* get number of PEs to make active, reduce */
  ierr = MatGetSize( Cmat, &neq, &NN ); CHKERRQ(ierr);
  new_npe = (PetscMPIInt)((float)neq/(float)min_eq_proc + 0.5); /* hardwire min. number of eq/proc */
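  /*
     A worked instance of the reduction heuristic above (illustrative numbers):
     with min_eq_proc = 1000 and a coarse grid of neq = 4400 equations,
     new_npe = (int)(4.4 + 0.5) = 4, so the coarse grid is targeted at 4 active
     processes regardless of how many ranks the fine grid uses.
  */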
  if( new_npe == 0 || neq < coarse_max ) new_npe = 1;
  else if (new_npe >= nactive ) new_npe = nactive; /* no change, rare */
  if( isLast ) new_npe = 1;

  if( !repart && new_npe==nactive ) {
    *a_Amat_crs = Cmat; /* output - no repartitioning or reduction */
  }
  else {
    /* repartition Cmat_{k} and move columns of P^{k}_{k-1} and coordinates accordingly */
    const PetscInt *idx,data_sz=ndata_rows*ndata_cols;
    const PetscInt  stride0=ncrs0*ndata_rows;
    PetscInt       *counts,is_sz,*newproc_idx,ii,jj,kk,strideNew,*tidx,inpe,targetPE;
    IS              isnewproc;
    VecScatter      vecscat;
    PetscScalar    *array;
    Vec             src_crd, dest_crd;
    PetscReal      *data = *a_coarse_data;
    IS              isscat;

    ierr = PetscMalloc( npe*sizeof(PetscInt), &counts ); CHKERRQ(ierr);

#if defined PETSC_USE_LOG
    ierr = PetscLogEventBegin(gamg_setup_events[SET12],0,0,0,0);CHKERRQ(ierr);
#endif
    if( repart ) {
      /* create sub communicator */
      Mat adj;

      if (pc_gamg->verbose) PetscPrintf(PETSC_COMM_WORLD,"\t[%d]%s repartition: npe (active): %d --> %d, neq = %d\n",mype,__FUNCT__,*a_nactive_proc,new_npe,neq);

      /* MatPartitioningApply calls MatConvert, which is collective */
      if( cbs == 1 ) {
        ierr = MatConvert( Cmat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj ); CHKERRQ(ierr);
      }
      else{
        /* make a scalar matrix to partition */
        Mat                tMat;
        PetscInt           ncols,jj,Ii;
        const PetscScalar *vals;
        const PetscInt    *idx;
        PetscInt          *d_nnz, *o_nnz;
        static int         llev = 0;

        ierr = PetscMalloc( ncrs0*sizeof(PetscInt), &d_nnz ); CHKERRQ(ierr);
        ierr = PetscMalloc( ncrs0*sizeof(PetscInt), &o_nnz ); CHKERRQ(ierr);
        for ( Ii = Istart0, jj = 0 ; Ii < Iend0 ; Ii += cbs, jj++ ) {
          ierr = MatGetRow(Cmat,Ii,&ncols,0,0); CHKERRQ(ierr);
          d_nnz[jj] = ncols/cbs;
          o_nnz[jj] = ncols/cbs;
          ierr = MatRestoreRow(Cmat,Ii,&ncols,0,0); CHKERRQ(ierr);
          if( d_nnz[jj] > ncrs0 ) d_nnz[jj] = ncrs0;
          if( o_nnz[jj] > (neq/cbs-ncrs0) ) o_nnz[jj] = neq/cbs-ncrs0;
        }

        ierr = MatCreateMPIAIJ( wcomm, ncrs0, ncrs0,
                                PETSC_DETERMINE, PETSC_DETERMINE,
                                0, d_nnz, 0, o_nnz,
                                &tMat );
        CHKERRQ(ierr);
        ierr = PetscFree( d_nnz ); CHKERRQ(ierr);
        ierr = PetscFree( o_nnz ); CHKERRQ(ierr);

        for ( ii = Istart0; ii < Iend0; ii++ ) {
          PetscInt dest_row = ii/cbs;
          ierr = MatGetRow(Cmat,ii,&ncols,&idx,&vals); CHKERRQ(ierr);
          for( jj = 0 ; jj < ncols ; jj++ ){
            PetscInt dest_col = idx[jj]/cbs;
            PetscScalar v = 1.0;
            ierr = MatSetValues(tMat,1,&dest_row,1,&dest_col,&v,ADD_VALUES); CHKERRQ(ierr);
          }
          ierr = MatRestoreRow(Cmat,ii,&ncols,&idx,&vals); CHKERRQ(ierr);
        }
        ierr = MatAssemblyBegin(tMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
        ierr = MatAssemblyEnd(tMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

        if( llev++ == -1 ) { /* debug: dump partitioning matrix */
          PetscViewer viewer; char fname[32];
          sprintf(fname,"part_mat_%d.mat",llev);
          PetscViewerBinaryOpen(wcomm,fname,FILE_MODE_WRITE,&viewer);
          ierr = MatView( tMat, viewer ); CHKERRQ(ierr);
          ierr = PetscViewerDestroy( &viewer ); CHKERRQ(ierr);
        }

        ierr = MatConvert( tMat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj ); CHKERRQ(ierr);

        ierr = MatDestroy( &tMat ); CHKERRQ(ierr);
      }

      { /* partition: get newproc_idx, set is_sz */
        char            prefix[256];
        const char     *pcpre;
        const PetscInt *is_idx;
        MatPartitioning mpart;
        IS              proc_is;

        ierr = MatPartitioningCreate( wcomm, &mpart ); CHKERRQ(ierr);
        ierr = MatPartitioningSetAdjacency( mpart, adj ); CHKERRQ(ierr);
        ierr = PCGetOptionsPrefix(pc,&pcpre);CHKERRQ(ierr);
        ierr = PetscSNPrintf(prefix,sizeof prefix,"%spc_gamg_",pcpre?pcpre:"");CHKERRQ(ierr);
        ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
        ierr = MatPartitioningSetFromOptions( mpart ); CHKERRQ(ierr);
        ierr = MatPartitioningSetNParts( mpart, new_npe );CHKERRQ(ierr);
        ierr = MatPartitioningApply( mpart, &proc_is ); CHKERRQ(ierr);
        ierr = MatPartitioningDestroy( &mpart ); CHKERRQ(ierr);

        /* collect IS info */
        ierr = ISGetLocalSize( proc_is, &is_sz ); CHKERRQ(ierr);
        ierr = PetscMalloc( cbs*is_sz*sizeof(PetscInt), &newproc_idx ); CHKERRQ(ierr);
        ierr = ISGetIndices( proc_is, &is_idx ); CHKERRQ(ierr);
        NN = 1; /* bring to "front" of machine */
        /*NN = npe/new_npe;*/ /* spread partitioning across machine */
        for( kk = jj = 0 ; kk < is_sz ; kk++ ){
          for( ii = 0 ; ii < cbs ; ii++, jj++ ){
            newproc_idx[jj] = is_idx[kk] * NN; /* distribution */
          }
        }
        ierr = ISRestoreIndices( proc_is, &is_idx ); CHKERRQ(ierr);
        ierr = ISDestroy( &proc_is ); CHKERRQ(ierr);

        is_sz *= cbs;
      }
      ierr = MatDestroy( &adj ); CHKERRQ(ierr);

      ierr = ISCreateGeneral( wcomm, is_sz, newproc_idx, PETSC_COPY_VALUES, &isnewproc );
      CHKERRQ(ierr);
      if( newproc_idx != 0 ) {
        ierr = PetscFree( newproc_idx ); CHKERRQ(ierr);
      }
    }
    else { /* simple aggregation of parts */
      /* find reduction factor */
      if( new_npe == 1 ) rfactor = npe; /* easy */
      else {
        PetscReal best_fact = 0.;
        jj = -1;
        for( kk = 1 ; kk <= npe ; kk++ ){
          if( npe%kk==0 ) { /* a candidate */
            PetscReal nactpe = (PetscReal)npe/(PetscReal)kk, fact = nactpe/(PetscReal)new_npe;
            if(fact > 1.0) fact = 1./fact; /* keep fact < 1 */
            if( fact > best_fact ) {
              best_fact = fact; jj = kk;
            }
          }
        }
        if(jj!=-1) rfactor = jj;
        else rfactor = 1; /* npe is prime? */
      }
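      /*
         A worked instance of the factor search above (illustrative numbers):
         npe = 12 and new_npe = 5.  The divisors kk of 12 give active counts
         npe/kk = 12, 6, 4, 3, 2, 1; the one closest to 5 (largest 'fact') is
         6, from kk = 2, so rfactor = 2 and the next line settles on
         new_npe = 12/2 = 6.
      */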
      new_npe = npe/rfactor;

      if( new_npe==nactive ) {
        *a_Amat_crs = Cmat; /* output - no repartitioning or reduction */
        ierr = PetscFree( counts ); CHKERRQ(ierr);
        *a_nactive_proc = new_npe; /* output */
        if (pc_gamg->verbose) PetscPrintf(PETSC_COMM_WORLD,"\t[%d]%s aggregate processors noop: new_npe=%d, neq=%d\n",mype,__FUNCT__,new_npe,neq);
        PetscFunctionReturn(0);
      }

      /* reduce - isnewproc */
      targetPE = mype/rfactor;

      if (pc_gamg->verbose) PetscPrintf(PETSC_COMM_WORLD,"\t[%d]%s aggregate processors: npe: %d --> %d, neq=%d\n",mype,__FUNCT__,*a_nactive_proc,new_npe,neq);
      is_sz = ncrs0*cbs;
      ierr = ISCreateStride( wcomm, is_sz, targetPE, 0, &isnewproc );
      CHKERRQ(ierr);
    }

    /*
      Create an index set from the isnewproc index set to indicate the mapping TO
    */
    ierr = ISPartitioningToNumbering( isnewproc, &isnum ); CHKERRQ(ierr);
    /*
      Determine how many elements are assigned to each processor
    */
    inpe = npe;
    ierr = ISPartitioningCount( isnewproc, inpe, counts ); CHKERRQ(ierr);
    ierr = ISDestroy( &isnewproc ); CHKERRQ(ierr);
    ncrs_new = counts[mype]/cbs;
    strideNew = ncrs_new*ndata_rows;
#if defined PETSC_USE_LOG
    ierr = PetscLogEventEnd(gamg_setup_events[SET12],0,0,0,0); CHKERRQ(ierr);
#endif
    /* create a vector to contain the newly ordered element information */
    ierr = VecCreate( wcomm, &dest_crd ); CHKERRQ(ierr);
    ierr = VecSetSizes( dest_crd, data_sz*ncrs_new, PETSC_DECIDE ); CHKERRQ(ierr);
    ierr = VecSetFromOptions( dest_crd ); CHKERRQ(ierr); /* this is needed! */
    /*
      There are 'ndata_rows*ndata_cols' data items per node (one can think of
      the vectors as having a block size of ...).  Note, ISs are expanded into
      equation space by 'cbs'.
    */
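    /*
       Index map used below (a sketch with illustrative numbers): with
       ndata_rows = 2 and ndata_cols = 3 (so data_sz = 6), node ii stores its
       6 values at source offsets ix = ii*2 + kk + jj*stride0 (column oriented
       across all nodes) and at destination offsets jx = ii*6 + kk*3 + jj
       (node-blocked), for kk < ndata_rows, jj < ndata_cols.
    */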
    ierr = PetscMalloc( (ncrs0*data_sz)*sizeof(PetscInt), &tidx ); CHKERRQ(ierr);
    ierr = ISGetIndices( isnum, &idx ); CHKERRQ(ierr);
    for(ii=0,jj=0; ii<ncrs0 ; ii++) {
      PetscInt id = idx[ii*cbs]/cbs; /* get node back */
      for( kk=0; kk<data_sz ; kk++, jj++) tidx[jj] = id*data_sz + kk;
    }
    ierr = ISRestoreIndices( isnum, &idx ); CHKERRQ(ierr);
    ierr = ISCreateGeneral( wcomm, data_sz*ncrs0, tidx, PETSC_COPY_VALUES, &isscat );
    CHKERRQ(ierr);
    ierr = PetscFree( tidx ); CHKERRQ(ierr);
    /*
      Create a vector to contain the original vertex information for each element
    */
    ierr = VecCreateSeq( PETSC_COMM_SELF, data_sz*ncrs0, &src_crd ); CHKERRQ(ierr);
    for( jj=0; jj<ndata_cols ; jj++ ) {
      for( ii=0 ; ii<ncrs0 ; ii++) {
        for( kk=0; kk<ndata_rows ; kk++ ) {
          PetscInt ix = ii*ndata_rows + kk + jj*stride0, jx = ii*data_sz + kk*ndata_cols + jj;
          PetscScalar tt = (PetscScalar)data[ix];
          ierr = VecSetValues( src_crd, 1, &jx, &tt, INSERT_VALUES ); CHKERRQ(ierr);
        }
      }
    }
    ierr = VecAssemblyBegin(src_crd); CHKERRQ(ierr);
    ierr = VecAssemblyEnd(src_crd); CHKERRQ(ierr);
    /*
      Scatter the element vertex information (still in the original vertex ordering)
      to the correct processor
    */
    ierr = VecScatterCreate( src_crd, PETSC_NULL, dest_crd, isscat, &vecscat);
    CHKERRQ(ierr);
    ierr = ISDestroy( &isscat ); CHKERRQ(ierr);
    ierr = VecScatterBegin(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterDestroy( &vecscat ); CHKERRQ(ierr);
    ierr = VecDestroy( &src_crd ); CHKERRQ(ierr);
    /*
      Put the element vertex data into a new allocation of the gdata->ele
    */
    ierr = PetscFree( *a_coarse_data ); CHKERRQ(ierr);
    ierr = PetscMalloc( data_sz*ncrs_new*sizeof(PetscReal), a_coarse_data ); CHKERRQ(ierr);

    ierr = VecGetArray( dest_crd, &array ); CHKERRQ(ierr);
    data = *a_coarse_data;
    for( jj=0; jj<ndata_cols ; jj++ ) {
      for( ii=0 ; ii<ncrs_new ; ii++) {
        for( kk=0; kk<ndata_rows ; kk++ ) {
          PetscInt ix = ii*ndata_rows + kk + jj*strideNew, jx = ii*data_sz + kk*ndata_cols + jj;
          data[ix] = PetscRealPart(array[jx]);
          array[jx] = 1.e300; /* poison the consumed slot (debug aid) */
        }
      }
    }
    ierr = VecRestoreArray( dest_crd, &array ); CHKERRQ(ierr);
    ierr = VecDestroy( &dest_crd ); CHKERRQ(ierr);
#if defined PETSC_USE_LOG
    ierr = PetscLogEventBegin(gamg_setup_events[SET13],0,0,0,0);CHKERRQ(ierr);
#endif
    /*
      Invert for MatGetSubMatrix
    */
    ierr = ISInvertPermutation( isnum, ncrs_new*cbs, &new_indices ); CHKERRQ(ierr);
    ierr = ISSort( new_indices ); CHKERRQ(ierr); /* is this needed? */
    ierr = ISDestroy( &isnum ); CHKERRQ(ierr);
#if defined PETSC_USE_LOG
    ierr = PetscLogEventEnd(gamg_setup_events[SET13],0,0,0,0);CHKERRQ(ierr);
    ierr = PetscLogEventBegin(gamg_setup_events[SET14],0,0,0,0);CHKERRQ(ierr);
#endif
    /* A_crs output */
    ierr = MatGetSubMatrix( Cmat, new_indices, new_indices, MAT_INITIAL_MATRIX, a_Amat_crs );
    CHKERRQ(ierr);

    ierr = MatDestroy( &Cmat ); CHKERRQ(ierr);
    Cmat = *a_Amat_crs; /* output */
    ierr = MatSetBlockSize( Cmat, cbs ); CHKERRQ(ierr);
#if defined PETSC_USE_LOG
    ierr = PetscLogEventEnd(gamg_setup_events[SET14],0,0,0,0);CHKERRQ(ierr);
#endif
    /* prolongator */
    ierr = MatGetOwnershipRange( Pold, &Istart, &Iend ); CHKERRQ(ierr);
    {
      IS findices;
#if defined PETSC_USE_LOG
      ierr = PetscLogEventBegin(gamg_setup_events[SET15],0,0,0,0);CHKERRQ(ierr);
#endif
      ierr = ISCreateStride(wcomm,Iend-Istart,Istart,1,&findices); CHKERRQ(ierr);
#ifdef USE_R
      ierr = MatGetSubMatrix( Pold, new_indices, findices, MAT_INITIAL_MATRIX, &Pnew );
#else
      ierr = MatGetSubMatrix( Pold, findices, new_indices, MAT_INITIAL_MATRIX, &Pnew );
#endif
      CHKERRQ(ierr);
      ierr = ISDestroy( &findices ); CHKERRQ(ierr);
#if defined PETSC_USE_LOG
      ierr = PetscLogEventEnd(gamg_setup_events[SET15],0,0,0,0);CHKERRQ(ierr);
#endif
    }
    ierr = MatDestroy( a_P_inout ); CHKERRQ(ierr);
    *a_P_inout = Pnew; /* output - repartitioned */
    ierr = ISDestroy( &new_indices ); CHKERRQ(ierr);
    ierr = PetscFree( counts ); CHKERRQ(ierr);
  }

  *a_nactive_proc = new_npe; /* output */

  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------- */
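/*
   Usage sketch for createLevel (illustrative; it mirrors the call made in
   PCSetUp_GAMG below): given the level-k matrix and prolongator, build the
   level k-1 Galerkin operator and migrate the coarse-grid data:

     ierr = createLevel( pc, Aarr[level], pc_gamg->data_rows, pc_gamg->data_cols,
                         bs, isLast, &Parr[level1], &coarse_data, &nactivepe,
                         &Aarr[level1] ); CHKERRQ(ierr);

   On return Parr[level1] has been re-columned to match the (possibly
   repartitioned) Aarr[level1], and nactivepe holds the new active-process count.
*/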
/*
   PCSetUp_GAMG - Prepares for the use of the GAMG preconditioner
                  by setting data structures and options.

   Input Parameter:
   . pc - the preconditioner context

   Application Interface Routine: PCSetUp()

   Notes:
   The interface routine PCSetUp() is not usually called directly by
   the user, but instead is called by PCApply() if necessary.
*/
#undef __FUNCT__
#define __FUNCT__ "PCSetUp_GAMG"
PetscErrorCode PCSetUp_GAMG( PC pc )
{
  PetscErrorCode ierr;
  PC_MG         *mg = (PC_MG*)pc->data;
  PC_GAMG       *pc_gamg = (PC_GAMG*)mg->innerctx;
  PC_MG_Levels **mglevels = mg->levels;
  Mat            Amat = pc->mat, Pmat = pc->pmat;
  PetscInt       fine_level, level, level1, M, N, bs, nloc, lidx, Istart, Iend;
  MPI_Comm       wcomm = ((PetscObject)pc)->comm;
  PetscMPIInt    mype,npe,nactivepe;
  Mat            Aarr[GAMG_MAXLEVELS], Parr[GAMG_MAXLEVELS];
  PetscReal     *coarse_data = 0, *data, emaxs[GAMG_MAXLEVELS];
  MatInfo        info;

  PetscFunctionBegin;
  pc_gamg->setup_count++;
  assert(pc_gamg->createprolongator);

  if( pc->setupcalled > 0 ) {
    /* just redo the Galerkin grids */
    Mat B,dA,dB;

    /* PCSetUp_MG seems to insist on setting this to GMRES */
    ierr = KSPSetType( mglevels[0]->smoothd, KSPPREONLY ); CHKERRQ(ierr);

    if( pc_gamg->Nlevels > 1 ) {
      /* currently only handle case where mat and pmat are the same on coarser levels */
      ierr = KSPGetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,&dA,&dB,PETSC_NULL);CHKERRQ(ierr);
      /* (re)set to get dirty flag */
      ierr = KSPSetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,dA,dB,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
      ierr = KSPSetUp( mglevels[pc_gamg->Nlevels-1]->smoothd ); CHKERRQ(ierr);

      for (level=pc_gamg->Nlevels-2; level>-1; level--) {
        ierr = KSPGetOperators(mglevels[level]->smoothd,PETSC_NULL,&B,PETSC_NULL);CHKERRQ(ierr);
        /* the first time through, the matrix structure has changed from repartitioning */
        if( pc_gamg->setup_count == 2 ) {
          ierr = MatDestroy( &B ); CHKERRQ(ierr);
          ierr = MatPtAP(dB,mglevels[level+1]->interpolate,MAT_INITIAL_MATRIX,1.0,&B);CHKERRQ(ierr);
          mglevels[level]->A = B;
        }
        else {
          ierr = MatPtAP(dB,mglevels[level+1]->interpolate,MAT_REUSE_MATRIX,1.0,&B);CHKERRQ(ierr);
        }
        ierr = KSPSetOperators(mglevels[level]->smoothd,B,B,SAME_NONZERO_PATTERN); CHKERRQ(ierr);
        dB = B;
        /* setup KSP/PC */
        ierr = KSPSetUp( mglevels[level]->smoothd ); CHKERRQ(ierr);
      }
    }

    pc->setupcalled = 2;

    PetscFunctionReturn(0);
  }

  ierr = MPI_Comm_rank(wcomm,&mype);CHKERRQ(ierr);
  ierr = MPI_Comm_size(wcomm,&npe);CHKERRQ(ierr);
  /* GAMG requires input of the fine-grid matrix; it determines nlevels. */
  ierr = MatGetBlockSize( Amat, &bs ); CHKERRQ(ierr);
  ierr = MatGetSize( Amat, &M, &N );CHKERRQ(ierr);
  ierr = MatGetOwnershipRange( Amat, &Istart, &Iend ); CHKERRQ(ierr);
  nloc = (Iend-Istart)/bs; assert((Iend-Istart)%bs == 0);

  if( pc_gamg->data == 0 && nloc > 0 ) {
    if(!pc_gamg->createdefaultdata){
      SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_LIB,"GEO MG needs coordinates");
    }
    ierr = pc_gamg->createdefaultdata( pc ); CHKERRQ(ierr);
  }
  data = pc_gamg->data;

  /* Get A_i and R_i */
  ierr = MatGetInfo(Amat,MAT_GLOBAL_SUM,&info); CHKERRQ(ierr);
  if (pc_gamg->verbose) PetscPrintf(PETSC_COMM_WORLD,"\t[%d]%s level %d N=%d, n data rows=%d, n data cols=%d, nnz/row (ave)=%d, np=%d\n",
                                    mype,__FUNCT__,0,N,pc_gamg->data_rows,pc_gamg->data_cols,
                                    (int)(info.nz_used/(PetscReal)N),npe);
  for ( level=0, Aarr[0] = Pmat, nactivepe = npe; /* hard wired stopping logic */
        level < (pc_gamg->Nlevels-1) && (level==0 || M>pc_gamg->coarse_eq_limit); /* && (npe==1 || nactivepe>1); */
        level++ ){
    level1 = level + 1;
#if (defined PETSC_USE_LOG && defined GAMG_STAGES)
    ierr = PetscLogStagePush(gamg_stages[level]); CHKERRQ( ierr );
#endif
#if defined PETSC_USE_LOG
    ierr = PetscLogEventBegin(gamg_setup_events[SET1],0,0,0,0);CHKERRQ(ierr);
#endif
    ierr = pc_gamg->createprolongator( pc, Aarr[level], data, &Parr[level1], &coarse_data );
    CHKERRQ(ierr);
    /* get new block size of coarse matrices */
    if( pc_gamg->col_bs_id != -1 && Parr[level1] ){
      PetscBool flag;
      ierr = PetscObjectComposedDataGetInt( (PetscObject)Parr[level1], pc_gamg->col_bs_id, bs, flag );
      CHKERRQ( ierr );
    }
    /* cache eigen estimate */
    if( pc_gamg->emax_id != -1 ){
      PetscBool flag;
      ierr = PetscObjectComposedDataGetScalar( (PetscObject)Aarr[level], pc_gamg->emax_id, emaxs[level], flag );
      CHKERRQ( ierr );
      if( !flag ) emaxs[level] = -1.;
    }
    else emaxs[level] = -1.;

    ierr = PetscFree( data ); CHKERRQ( ierr );
#if defined PETSC_USE_LOG
    ierr = PetscLogEventEnd(gamg_setup_events[SET1],0,0,0,0);CHKERRQ(ierr);
#endif
    if(level==0) Aarr[0] = Amat; /* use Pmat for finest level setup, but use mat for solver */
    if( Parr[level1] ) {
#if defined PETSC_USE_LOG
      ierr = PetscLogEventBegin(gamg_setup_events[SET2],0,0,0,0);CHKERRQ(ierr);
#endif
      ierr = createLevel( pc, Aarr[level],
                          pc_gamg->data_rows,
                          pc_gamg->data_cols, bs,
                          (PetscBool)(level==pc_gamg->Nlevels-2),
                          &Parr[level1], &coarse_data, &nactivepe, &Aarr[level1] );
      CHKERRQ(ierr);
#if defined PETSC_USE_LOG
      ierr = PetscLogEventEnd(gamg_setup_events[SET2],0,0,0,0);CHKERRQ(ierr);
#endif
      ierr = MatGetSize( Aarr[level1], &M, &N );CHKERRQ(ierr);
      ierr = MatGetInfo( Aarr[level1], MAT_GLOBAL_SUM, &info ); CHKERRQ(ierr);
      if (pc_gamg->verbose) PetscPrintf(PETSC_COMM_WORLD,"\t\t[%d]%s %d) N=%d, n data cols=%d, nnz/row (ave)=%d, %d active pes\n",
                                        mype,__FUNCT__,(int)level1,N,pc_gamg->data_cols,
                                        (int)(info.nz_used/(PetscReal)N),nactivepe);
      /* coarse grids with SA can have zero row/cols from singleton aggregates */

      /* stop if one node */
      if( M/pc_gamg->data_cols < 2 ) {
        level++;
        break;
      }

      if (PETSC_FALSE) { /* debug: patch zero diagonals on the coarse grid */
        Vec diag; PetscScalar *data_arr,v; PetscInt Istart,Iend,kk,nloceq,id;
        v = 1.e-10; /* LU factor has hard wired numbers for small diags so this needs to match (yuk) */
        ierr = MatGetOwnershipRange(Aarr[level1], &Istart, &Iend); CHKERRQ(ierr);
        nloceq = Iend-Istart;
        ierr = MatGetVecs( Aarr[level1], &diag, 0 ); CHKERRQ(ierr);
        ierr = MatGetDiagonal( Aarr[level1], diag ); CHKERRQ(ierr);
        ierr = VecGetArray( diag, &data_arr ); CHKERRQ(ierr);
        for(kk=0;kk<nloceq;kk++){
          if(data_arr[kk]==0.0) {
            id = kk + Istart;
            ierr = MatSetValues(Aarr[level1],1,&id,1,&id,&v,INSERT_VALUES);
            CHKERRQ(ierr);
            PetscPrintf(PETSC_COMM_SELF,"\t[%d]%s warning: added zero to diag (%d) on level %d \n",mype,__FUNCT__,id,level1);
          }
        }
        ierr = VecRestoreArray( diag, &data_arr ); CHKERRQ(ierr);
        ierr = VecDestroy( &diag ); CHKERRQ(ierr);
        ierr = MatAssemblyBegin(Aarr[level1],MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
        ierr = MatAssemblyEnd(Aarr[level1],MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
      }
    } else {
      coarse_data = 0;
      break;
    }
    data = coarse_data;

#if (defined PETSC_USE_LOG && defined GAMG_STAGES)
    ierr = PetscLogStagePop(); CHKERRQ( ierr );
#endif
  }
  if( coarse_data ) {
    ierr = PetscFree( coarse_data ); CHKERRQ( ierr );
  }
  if (pc_gamg->verbose) PetscPrintf(PETSC_COMM_WORLD,"\t[%d]%s %d levels\n",0,__FUNCT__,level + 1);
  pc_gamg->data = 0; /* destroyed coordinate data */
  pc_gamg->Nlevels = level + 1;
  fine_level = level;
  ierr = PCMGSetLevels(pc,pc_gamg->Nlevels,PETSC_NULL);CHKERRQ(ierr);

  if( pc_gamg->Nlevels > 1 ) { /* don't setup MG if there is only one level */
    /* set default smoothers */
    for ( lidx = 1, level = pc_gamg->Nlevels-2;
          lidx <= fine_level;
          lidx++, level--) {
      PetscReal emax, emin;
      KSP smoother; PC subpc;
      PetscBool isCheb;
      /* set defaults */
      ierr = PCMGGetSmoother( pc, lidx, &smoother ); CHKERRQ(ierr);
      ierr = KSPSetType( smoother, KSPCHEBYCHEV );CHKERRQ(ierr);
      ierr = KSPGetPC( smoother, &subpc ); CHKERRQ(ierr);
      /* ierr = KSPSetTolerances(smoother,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,2); CHKERRQ(ierr); */
      ierr = PCSetType( subpc, PCJACOBI ); CHKERRQ(ierr);
      ierr = KSPSetNormType( smoother, KSP_NORM_NONE ); CHKERRQ(ierr);
      /* override defaults with input parameters */
      ierr = KSPSetFromOptions( smoother ); CHKERRQ(ierr);

      ierr = KSPSetOperators( smoother, Aarr[level], Aarr[level], SAME_NONZERO_PATTERN ); CHKERRQ(ierr);
      /* do my own cheby */
      ierr = PetscTypeCompare( (PetscObject)smoother, KSPCHEBYCHEV, &isCheb ); CHKERRQ(ierr);

      if( isCheb ) {
        ierr = PetscTypeCompare( (PetscObject)subpc, PCJACOBI, &isCheb ); CHKERRQ(ierr);
        if( isCheb && emaxs[level] > 0.0 ) emax=emaxs[level]; /* cached eigen estimate only for diagonal PC */
        else{ /* compute eigen estimate 'emax' */
          KSP eksp; Mat Lmat = Aarr[level];
          Vec bb, xx; PC pc;
          const PCType type;

          ierr = PCGetType( subpc, &type ); CHKERRQ(ierr);
          ierr = MatGetVecs( Lmat, &bb, 0 ); CHKERRQ(ierr);
          ierr = MatGetVecs( Lmat, &xx, 0 ); CHKERRQ(ierr);
          {
            PetscRandom rctx;
            ierr = PetscRandomCreate(wcomm,&rctx);CHKERRQ(ierr);
            ierr = PetscRandomSetFromOptions(rctx);CHKERRQ(ierr);
            ierr = VecSetRandom(bb,rctx);CHKERRQ(ierr);
            ierr = PetscRandomDestroy( &rctx ); CHKERRQ(ierr);
          }
          ierr = KSPCreate(wcomm,&eksp);CHKERRQ(ierr);
          ierr = KSPSetTolerances( eksp, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT, 10 );
          CHKERRQ(ierr);
          ierr = KSPSetNormType( eksp, KSP_NORM_NONE ); CHKERRQ(ierr);

          ierr = KSPAppendOptionsPrefix( eksp, "est_"); CHKERRQ(ierr);
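          /*
             The "est_" prefix means the eigenvalue-estimation solve can be
             steered separately from the options database; e.g. -est_ksp_type
             gmres or -est_ksp_max_it 20 would affect only this KSP (a usage
             note, not code from this file).
          */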
          ierr = KSPSetFromOptions( eksp ); CHKERRQ(ierr);

          ierr = KSPSetInitialGuessNonzero( eksp, PETSC_FALSE ); CHKERRQ(ierr);
          ierr = KSPSetOperators( eksp, Lmat, Lmat, SAME_NONZERO_PATTERN ); CHKERRQ( ierr );
          ierr = KSPGetPC( eksp, &pc );CHKERRQ( ierr );
          ierr = PCSetType( pc, type ); CHKERRQ(ierr); /* should be the same PC as the eigen estimate's op. */

          ierr = KSPSetComputeSingularValues( eksp,PETSC_TRUE ); CHKERRQ(ierr);

          /* solve - keep stuff out of logging */
          ierr = PetscLogEventDeactivate(KSP_Solve);CHKERRQ(ierr);
          ierr = PetscLogEventDeactivate(PC_Apply);CHKERRQ(ierr);
          ierr = KSPSolve( eksp, bb, xx ); CHKERRQ(ierr);
          ierr = PetscLogEventActivate(KSP_Solve);CHKERRQ(ierr);
          ierr = PetscLogEventActivate(PC_Apply);CHKERRQ(ierr);

          ierr = KSPComputeExtremeSingularValues( eksp, &emax, &emin ); CHKERRQ(ierr);

          ierr = VecDestroy( &xx ); CHKERRQ(ierr);
          ierr = VecDestroy( &bb ); CHKERRQ(ierr);
          ierr = KSPDestroy( &eksp ); CHKERRQ(ierr);

          if (pc_gamg->verbose) {
            PetscPrintf(PETSC_COMM_WORLD,"\t\t\t%s PC setup max eigen=%e min=%e\n",
                        __FUNCT__,emax,emin);
          }
        }
        {
          PetscInt N1, N0, tt;
          ierr = MatGetSize( Aarr[level], &N1, &tt ); CHKERRQ(ierr);
          ierr = MatGetSize( Aarr[level+1], &N0, &tt ); CHKERRQ(ierr);
          /* heuristic - is this crap? */
          emin = 1.*emax/((PetscReal)N1/(PetscReal)N0);
          emax *= 1.05;
        }
        ierr = KSPChebychevSetEigenvalues( smoother, emax, emin );CHKERRQ(ierr);
      }
    }
    {
      /* coarse grid */
      KSP smoother,*k2; PC subpc,pc2; PetscInt ii,first;
      Mat Lmat = Aarr[pc_gamg->Nlevels-1];
      ierr = PCMGGetSmoother( pc, 0, &smoother ); CHKERRQ(ierr);
      ierr = KSPSetOperators( smoother, Lmat, Lmat, SAME_NONZERO_PATTERN ); CHKERRQ(ierr);
      ierr = KSPSetNormType( smoother, KSP_NORM_NONE ); CHKERRQ(ierr);
      ierr = KSPGetPC( smoother, &subpc ); CHKERRQ(ierr);
      ierr = PCSetType( subpc, PCBJACOBI ); CHKERRQ(ierr);
      ierr = PCSetUp( subpc ); CHKERRQ(ierr);
      ierr = PCBJacobiGetSubKSP(subpc,&ii,&first,&k2);CHKERRQ(ierr);
      assert(ii==1);
      ierr = KSPGetPC(k2[0],&pc2);CHKERRQ(ierr);
      ierr = PCSetType( pc2, PCLU ); CHKERRQ(ierr);
    }

    /* should be called in PCSetFromOptions_GAMG(), but cannot be called prior to PCMGSetLevels() */
    ierr = PCSetFromOptions_MG(pc); CHKERRQ(ierr);
    {
      PetscBool galerkin;
      ierr = PCMGGetGalerkin( pc, &galerkin); CHKERRQ(ierr);
      if(galerkin){
        SETERRQ(wcomm,PETSC_ERR_ARG_WRONG, "GAMG does Galerkin manually so it must not be used in PC_MG.");
      }
    }

    /* set interpolation between the levels, clean up */
    for (lidx=0,level=pc_gamg->Nlevels-1;
         lidx<fine_level;
         lidx++, level--){
      ierr = PCMGSetInterpolation( pc, lidx+1, Parr[level] );CHKERRQ(ierr);
      ierr = MatDestroy( &Parr[level] ); CHKERRQ(ierr);
      ierr = MatDestroy( &Aarr[level] ); CHKERRQ(ierr);
    }

    /* setupcalled is set to 0 so that MG is set up from scratch */
    pc->setupcalled = 0;
    ierr = PCSetUp_MG( pc );CHKERRQ( ierr );
    pc->setupcalled = 1; /* use 1 as a signal that this has not been re-setup */

    {
      KSP smoother; /* PCSetUp_MG seems to insist on setting this to GMRES on the coarse grid */
      ierr = PCMGGetSmoother( pc, 0, &smoother ); CHKERRQ(ierr);
      ierr = KSPSetType( smoother, KSPPREONLY ); CHKERRQ(ierr);
      ierr = KSPSetUp( smoother ); CHKERRQ(ierr);
    }
  }
  else {
    KSP smoother;
  if (pc_gamg->verbose) PetscPrintf(PETSC_COMM_WORLD,"\t[%d]%s one-level solver used (system is seen as DD); using default solver.\n",mype,__FUNCT__);
  ierr = PCMGGetSmoother( pc, 0, &smoother ); CHKERRQ(ierr);
  ierr = KSPSetOperators( smoother, Aarr[0], Aarr[0], SAME_NONZERO_PATTERN ); CHKERRQ(ierr);
  ierr = KSPSetType( smoother, KSPPREONLY ); CHKERRQ(ierr);
  /* setupcalled is set to 0 so that MG is set up from scratch */
  pc->setupcalled = 0;
  ierr = PCSetUp_MG( pc );CHKERRQ(ierr);
  pc->setupcalled = 1; /* use 1 to signal that this PC has not been re-set-up */
}
PetscFunctionReturn(0);
}

/* ------------------------------------------------------------------------- */
/*
   PCDestroy_GAMG - Destroys the private context for the GAMG preconditioner
   that was created with PCCreate_GAMG().

   Input Parameter:
.  pc - the preconditioner context

   Application Interface Routine: PCDestroy()
*/
#undef __FUNCT__
#define __FUNCT__ "PCDestroy_GAMG"
PetscErrorCode PCDestroy_GAMG(PC pc)
{
  PetscErrorCode ierr;
  PC_MG          *mg = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  ierr = PCReset_GAMG(pc);CHKERRQ(ierr);
  ierr = PetscFree(pc_gamg);CHKERRQ(ierr);
  ierr = PCDestroy_MG(pc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetProcEqLim"
/*@
   PCGAMGSetProcEqLim - Set the number of equations to aim for on coarse grids
   via processor reduction.

   Not Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - the target number of equations per process on coarse grids

   Options Database Key:
.  -pc_gamg_process_eq_limit <n>

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetProcEqLim(PC pc, PetscInt n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetProcEqLim_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetProcEqLim_GAMG"
PetscErrorCode PCGAMGSetProcEqLim_GAMG(PC pc, PetscInt n)
{
  PC_MG   *mg = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  if (n>0) pc_gamg->min_eq_proc = n;
  PetscFunctionReturn(0);
}
EXTERN_C_END
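
/* Usage sketch (illustrative only; 'ksp' and its operator are assumed to be
   created elsewhere in the application):

     PC pc;
     KSPGetPC(ksp,&pc);
     PCSetType(pc,PCGAMG);
     PCGAMGSetProcEqLim(pc,500);

   keeps roughly 500 equations per process on coarse grids by reducing the
   number of active processes; equivalent to -pc_gamg_process_eq_limit 500
   on the command line. */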
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetCoarseEqLim"
/*@
   PCGAMGSetCoarseEqLim - Set the maximum number of equations on the coarsest grid.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - the maximum number of equations for the coarsest grid

   Options Database Key:
.  -pc_gamg_coarse_eq_limit <n>

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetCoarseEqLim(PC pc, PetscInt n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetCoarseEqLim_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetCoarseEqLim_GAMG"
PetscErrorCode PCGAMGSetCoarseEqLim_GAMG(PC pc, PetscInt n)
{
  PC_MG   *mg = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  if (n>0) pc_gamg->coarse_eq_limit = n;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetRepartitioning"
/*@
   PCGAMGSetRepartitioning - Repartition the coarse grids.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - PETSC_TRUE to repartition the coarse grids

   Options Database Key:
.  -pc_gamg_repartition <true,false>

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetRepartitioning(PC pc, PetscBool n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetRepartitioning_C",(PC,PetscBool),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetRepartitioning_GAMG"
PetscErrorCode PCGAMGSetRepartitioning_GAMG(PC pc, PetscBool n)
{
  PC_MG   *mg = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->repart = n;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetNlevels"
/*@
   PCGAMGSetNlevels - Set the maximum number of multigrid levels.

   Not Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - the maximum number of levels to use

   Options Database Key:
.  -pc_mg_levels <n>

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetNlevels(PC pc, PetscInt n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetNlevels_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetNlevels_GAMG"
PetscErrorCode PCGAMGSetNlevels_GAMG(PC pc, PetscInt n)
{
  PC_MG   *mg = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->Nlevels = n;
  PetscFunctionReturn(0);
}
EXTERN_C_END
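
/* Usage sketch (illustrative): the setters above are usually driven from the
   options database, e.g.

     -pc_type gamg -pc_gamg_repartition true -pc_gamg_coarse_eq_limit 2000 -pc_mg_levels 4

   which corresponds to calling PCGAMGSetRepartitioning(pc,PETSC_TRUE),
   PCGAMGSetCoarseEqLim(pc,2000) and PCGAMGSetNlevels(pc,4) from application
   code. */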
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetThreshold"
/*@
   PCGAMGSetThreshold - Set the relative threshold used for dropping edges in the aggregation graph.

   Not Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - the threshold; scaled graph entries below this value are dropped

   Options Database Key:
.  -pc_gamg_threshold <threshold>

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetThreshold(PC pc, PetscReal n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetThreshold_C",(PC,PetscReal),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetThreshold_GAMG"
PetscErrorCode PCGAMGSetThreshold_GAMG(PC pc, PetscReal n)
{
  PC_MG   *mg = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->threshold = n;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetType"
/*@
   PCGAMGSetType - Set the GAMG method; calls the type-specific creation routine.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  type - the GAMG type (GAMGGEO or GAMGAGG)

   Options Database Key:
.  -pc_gamg_type <geo,agg>

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetType( PC pc, const PCGAMGType type )
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetType_C",(PC,const PCGAMGType),(pc,type));
  CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetType_GAMG"
PetscErrorCode PCGAMGSetType_GAMG( PC pc, const PCGAMGType type )
{
  PetscErrorCode ierr,(*r)(PC);

  PetscFunctionBegin;
  ierr = PetscFListFind(GAMGList,((PetscObject)pc)->comm,type,PETSC_FALSE,(PetscVoidStarFunction)&r);
  CHKERRQ(ierr);

  if (!r) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_UNKNOWN_TYPE,"Unknown GAMG type %s given",type);

  /* call the type-specific create method */
  ierr = (*r)(pc); CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCSetFromOptions_GAMG"
PetscErrorCode PCSetFromOptions_GAMG( PC pc )
{
  PetscErrorCode ierr;
  PC_MG          *mg = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
  PetscBool      flag;

  PetscFunctionBegin;
  ierr = PetscOptionsHead("GAMG options"); CHKERRQ(ierr);
  {
    /* -pc_gamg_verbose */
    ierr = PetscOptionsInt("-pc_gamg_verbose","Verbose (debugging) output for PCGAMG",
                           "none", pc_gamg->verbose,
                           &pc_gamg->verbose, PETSC_NULL );
    CHKERRQ(ierr);

    /* -pc_gamg_repartition */
    ierr = PetscOptionsBool("-pc_gamg_repartition",
                            "Repartition coarse grids (false)",
                            "PCGAMGSetRepartitioning",
                            pc_gamg->repart,
                            &pc_gamg->repart,
                            &flag);
    CHKERRQ(ierr);

    /* -pc_gamg_process_eq_limit */
    ierr = PetscOptionsInt("-pc_gamg_process_eq_limit",
                           "Limit (goal) on number of equations per process on coarse grids",
                           "PCGAMGSetProcEqLim",
                           pc_gamg->min_eq_proc,
                           &pc_gamg->min_eq_proc,
                           &flag );
    CHKERRQ(ierr);

    /* -pc_gamg_coarse_eq_limit */
    ierr = PetscOptionsInt("-pc_gamg_coarse_eq_limit",
                           "Limit on number of equations for the coarse grid",
                           "PCGAMGSetCoarseEqLim",
                           pc_gamg->coarse_eq_limit,
                           &pc_gamg->coarse_eq_limit,
                           &flag );
    CHKERRQ(ierr);
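
    /* Note: entries of the scaled connection graph whose magnitude does not
       exceed the threshold are dropped when the graph is filtered, so a
       value of 0.0 keeps all nonzero connections. */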
    /* -pc_gamg_threshold */
    ierr = PetscOptionsReal("-pc_gamg_threshold",
                            "Relative threshold to use for dropping edges in aggregation graph",
                            "PCGAMGSetThreshold",
                            pc_gamg->threshold,
                            &pc_gamg->threshold,
                            &flag );
    CHKERRQ(ierr);

    /* -pc_mg_levels */
    ierr = PetscOptionsInt("-pc_mg_levels",
                           "Set number of MG levels",
                           "PCGAMGSetNlevels",
                           pc_gamg->Nlevels,
                           &pc_gamg->Nlevels,
                           &flag );
    CHKERRQ(ierr);
  }
  ierr = PetscOptionsTail();CHKERRQ(ierr);

  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------- */
/*MC
   PCCreate_GAMG - Geometric-algebraic multigrid (AMG) preconditioning framework.
   This is the entry point to GAMG, registered in pcregis.c.

   Options Database Keys:
   Multigrid options (inherited):
+  -pc_mg_cycles <1>: 1 for V-cycle, 2 for W-cycle (PCMGSetCycleType)
.  -pc_mg_smoothup <1>: Number of post-smoothing steps (PCMGSetNumberSmoothUp)
.  -pc_mg_smoothdown <1>: Number of pre-smoothing steps (PCMGSetNumberSmoothDown)
-  -pc_mg_type <multiplicative>: (one of) additive multiplicative full cascade kaskade

   Level: intermediate

   Concepts: multigrid

.seealso: PCCreate(), PCSetType(), PCType (for list of available types), PC, PCMGType,
          PCMGSetLevels(), PCMGGetLevels(), PCMGSetType(), PCMGSetCycleType(), PCMGSetNumberSmoothDown(),
          PCMGSetNumberSmoothUp(), PCMGGetCoarseSolve(), PCMGSetResidual(), PCMGSetInterpolation(),
          PCMGSetRestriction(), PCMGGetSmoother(), PCMGGetSmootherUp(), PCMGGetSmootherDown(),
          PCMGSetCyclesOnLevel(), PCMGSetRhs(), PCMGSetX(), PCMGSetR()
M*/
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCCreate_GAMG"
PetscErrorCode PCCreate_GAMG( PC pc )
{
  PetscErrorCode ierr;
  PC_GAMG        *pc_gamg;
  PC_MG          *mg;
  PetscClassId   cookie;

#if defined PETSC_USE_LOG
  static long count = 0;
#endif

  PetscFunctionBegin;
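  /* Construction proceeds in three steps below: initialize this PC as a PCMG,
     override the PCMG methods with the GAMG ones, then instantiate the
     requested GAMG sub-type (AGG by default). */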
  /* PCGAMG is an inherited class of PCMG; initialize pc as a PCMG */
  ierr = PCSetType( pc, PCMG ); CHKERRQ(ierr); /* calls PCCreate_MG() and MGCreate_Private() */
  ierr = PetscObjectChangeTypeName( (PetscObject)pc, PCGAMG ); CHKERRQ(ierr);

  /* create the supporting struct and attach it to pc */
  ierr = PetscNewLog( pc, PC_GAMG, &pc_gamg ); CHKERRQ(ierr);
  mg = (PC_MG*)pc->data;
  mg->innerctx = pc_gamg;

  pc_gamg->setup_count = 0;
  /* these should be in the subctx, but repartitioning needs simple arrays */
  pc_gamg->data_sz = 0;
  pc_gamg->data = 0;

  /* register the AMG types */
  if (!GAMGList) {
    ierr = PetscFListAdd(&GAMGList,GAMGGEO,"PCCreateGAMG_GEO",(void(*)(void))PCCreateGAMG_GEO);CHKERRQ(ierr);
    ierr = PetscFListAdd(&GAMGList,GAMGAGG,"PCCreateGAMG_AGG",(void(*)(void))PCCreateGAMG_AGG);CHKERRQ(ierr);
  }

  /* overwrite the PCMG function pointers with those of the derived class PCGAMG */
  pc->ops->setfromoptions = PCSetFromOptions_GAMG;
  pc->ops->setup          = PCSetUp_GAMG;
  pc->ops->reset          = PCReset_GAMG;
  pc->ops->destroy        = PCDestroy_GAMG;

  ierr = PetscObjectComposeFunctionDynamic( (PetscObject)pc,
                                            "PCGAMGSetProcEqLim_C",
                                            "PCGAMGSetProcEqLim_GAMG",
                                            PCGAMGSetProcEqLim_GAMG);
  CHKERRQ(ierr);

  ierr = PetscObjectComposeFunctionDynamic( (PetscObject)pc,
                                            "PCGAMGSetCoarseEqLim_C",
                                            "PCGAMGSetCoarseEqLim_GAMG",
                                            PCGAMGSetCoarseEqLim_GAMG);
  CHKERRQ(ierr);

  ierr = PetscObjectComposeFunctionDynamic( (PetscObject)pc,
                                            "PCGAMGSetRepartitioning_C",
                                            "PCGAMGSetRepartitioning_GAMG",
                                            PCGAMGSetRepartitioning_GAMG);
  CHKERRQ(ierr);

  ierr = PetscObjectComposeFunctionDynamic( (PetscObject)pc,
                                            "PCGAMGSetThreshold_C",
                                            "PCGAMGSetThreshold_GAMG",
                                            PCGAMGSetThreshold_GAMG);
  CHKERRQ(ierr);

  ierr = PetscObjectComposeFunctionDynamic( (PetscObject)pc,
                                            "PCGAMGSetType_C",
                                            "PCGAMGSetType_GAMG",
                                            PCGAMGSetType_GAMG);
  CHKERRQ(ierr);

  /* defaults */
  pc_gamg->repart          = PETSC_FALSE;
  pc_gamg->min_eq_proc     = 100;
  pc_gamg->coarse_eq_limit = 1000;
  pc_gamg->threshold       = 0.05;
  pc_gamg->Nlevels         = GAMG_MAXLEVELS;
  pc_gamg->verbose         = 0;
  pc_gamg->emax_id         = -1;
  pc_gamg->col_bs_id       = -1;

  /* instantiate the derived type */
  ierr = PetscOptionsHead("GAMG options"); CHKERRQ(ierr);
  {
    char tname[256] = GAMGAGG;
    ierr = PetscOptionsList("-pc_gamg_type","Type of GAMG method","PCGAMGSetType",
                            GAMGList, tname, tname, sizeof(tname), PETSC_NULL );
    CHKERRQ(ierr);
    ierr = PCGAMGSetType( pc, tname ); CHKERRQ(ierr);
  }
  ierr = PetscOptionsTail();CHKERRQ(ierr);

#if defined PETSC_USE_LOG
  if( count++ == 0 ) {
    PetscClassIdRegister("GAMG Setup",&cookie);
    PetscLogEventRegister("GAMG: createProl", cookie, &gamg_setup_events[SET1]);
    PetscLogEventRegister(" Graph", cookie, &gamg_setup_events[GRAPH]);
    PetscLogEventRegister(" G.Mat", cookie, &gamg_setup_events[GRAPH_MAT]);
    PetscLogEventRegister(" G.Filter", cookie, &gamg_setup_events[GRAPH_FILTER]);
    PetscLogEventRegister(" G.Square", cookie, &gamg_setup_events[GRAPH_SQR]);
    PetscLogEventRegister(" MIS/Agg", cookie, &gamg_setup_events[SET4]);
    PetscLogEventRegister(" geo: growSupp", cookie, &gamg_setup_events[SET5]);
    PetscLogEventRegister(" geo: triangle", cookie, &gamg_setup_events[SET6]);
    PetscLogEventRegister(" search&set", cookie, &gamg_setup_events[FIND_V]);
    PetscLogEventRegister(" SA: init", cookie, &gamg_setup_events[SET7]);
    /* PetscLogEventRegister(" SA: frmProl0", cookie, &gamg_setup_events[SET8]); */
    PetscLogEventRegister(" SA: smooth", cookie, &gamg_setup_events[SET9]);
    PetscLogEventRegister("GAMG: partLevel", cookie, &gamg_setup_events[SET2]);
    PetscLogEventRegister(" repartition", cookie, &gamg_setup_events[SET12]);
    PetscLogEventRegister(" Invert-Sort", cookie, &gamg_setup_events[SET13]);
    PetscLogEventRegister(" Move A", cookie, &gamg_setup_events[SET14]);
    PetscLogEventRegister(" Move P", cookie, &gamg_setup_events[SET15]);

    /* PetscLogEventRegister(" PL move data", cookie, &gamg_setup_events[SET13]); */
    /* PetscLogEventRegister("GAMG: fix", cookie, &gamg_setup_events[SET10]); */
    /* PetscLogEventRegister("GAMG: set levels", cookie, &gamg_setup_events[SET11]); */

    /* create timer stages */
#if defined GAMG_STAGES
    {
      char     str[32];
      PetscInt lidx; /* declared at block start for C89 compliance */
      sprintf(str,"MG Level %d (finest)",0);
      PetscLogStageRegister(str, &gamg_stages[0]);
      for (lidx=1;lidx<9;lidx++){
        sprintf(str,"MG Level %d",(int)lidx);
        PetscLogStageRegister(str, &gamg_stages[lidx]);
      }
    }
#endif
  }
#endif
  PetscFunctionReturn(0);
}
EXTERN_C_END
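
/* Example driver (sketch; not part of this file).  Error checking is omitted
   and Amat, b, x are assumed to be created and assembled by the application:

     #include <petscksp.h>

     KSP ksp; PC pc;
     KSPCreate(PETSC_COMM_WORLD,&ksp);
     KSPSetOperators(ksp,Amat,Amat,SAME_NONZERO_PATTERN);
     KSPGetPC(ksp,&pc);
     PCSetType(pc,PCGAMG);            (calls PCCreate_GAMG() above)
     PCGAMGSetType(pc,GAMGAGG);       (smoothed aggregation)
     PCGAMGSetThreshold(pc,0.05);     (drop weak graph edges)
     KSPSetFromOptions(ksp);
     KSPSolve(ksp,b,x);
     KSPDestroy(&ksp);
*/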