/*
 GAMG geometric-algebraic multigrid PC - Mark Adams 2011
 */

#include <../src/ksp/pc/impls/gamg/gamg.h>        /*I "petscpc.h" I*/
#include <petsc-private/kspimpl.h>

#include <assert.h>
#include <petscblaslapack.h>

/* Aggregation-specific context hung off PC_GAMG->subctx. */
typedef struct {
  PetscInt  nsmooths;      /* number of smoothing steps applied to the tentative prolongator */
  PetscBool sym_graph;     /* symmetrize the graph before MIS coarsening (needed for asymmetric matrices) */
  PetscBool square_graph;  /* square the graph before coarsening (faster coarsening, lower complexity) */
} PC_GAMG_AGG;

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetNSmooths"
/*@
   PCGAMGSetNSmooths - Set number of smoothing steps (1 is typical)

   Not Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - the number of prolongator smoothing steps

   Options Database Key:
.  -pc_gamg_agg_nsmooths

   Level: intermediate

   Concepts: Aggregation AMG preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetNSmooths(PC pc, PetscInt n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* dispatch to the implementation below only if this PC is of type GAMG */
  ierr = PetscTryMethod(pc,"PCGAMGSetNSmooths_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetNSmooths_GAMG"
/* Implementation: stores the smoothing-step count in the AGG subcontext. */
PetscErrorCode PCGAMGSetNSmooths_GAMG(PC pc, PetscInt n)
{
  PC_MG       *mg = (PC_MG*)pc->data;
  PC_GAMG     *pc_gamg = (PC_GAMG*)mg->innerctx;
  PC_GAMG_AGG *pc_gamg_agg = (PC_GAMG_AGG*)pc_gamg->subctx;

  PetscFunctionBegin;
  pc_gamg_agg->nsmooths = n;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetSymGraph"
/*@
   PCGAMGSetSymGraph - Symmetrize the graph before computing the aggregation
     (set this for structurally asymmetric matrices)

   Not Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - PETSC_TRUE to symmetrize

   Options Database Key:
.  -pc_gamg_sym_graph

   Level: intermediate

   Concepts: Aggregation AMG preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetSymGraph(PC pc, PetscBool n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetSymGraph_C",(PC,PetscBool),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetSymGraph_GAMG"
/* Implementation: stores the symmetrize-graph flag in the AGG subcontext. */
PetscErrorCode PCGAMGSetSymGraph_GAMG(PC pc, PetscBool n)
{
  PC_MG       *mg = (PC_MG*)pc->data;
  PC_GAMG     *pc_gamg = (PC_GAMG*)mg->innerctx;
  PC_GAMG_AGG *pc_gamg_agg = (PC_GAMG_AGG*)pc_gamg->subctx;

  PetscFunctionBegin;
  pc_gamg_agg->sym_graph = n;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetSquareGraph"
/*@
   PCGAMGSetSquareGraph - Square the graph before computing the aggregation

   Not Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - PETSC_TRUE to square the graph

   Options Database Key:
.  -pc_gamg_square_graph

   Level: intermediate

   Concepts: Aggregation AMG preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetSquareGraph(PC pc, PetscBool n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetSquareGraph_C",(PC,PetscBool),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetSquareGraph_GAMG"
/* Implementation: stores the square-graph flag in the AGG subcontext. */
PetscErrorCode PCGAMGSetSquareGraph_GAMG(PC pc, PetscBool n)
{
  PC_MG       *mg = (PC_MG*)pc->data;
  PC_GAMG     *pc_gamg = (PC_GAMG*)mg->innerctx;
  PC_GAMG_AGG *pc_gamg_agg = (PC_GAMG_AGG*)pc_gamg->subctx;

  PetscFunctionBegin;
  pc_gamg_agg->square_graph = n;
  PetscFunctionReturn(0);
}
EXTERN_C_END

/* -------------------------------------------------------------------------- */
/*
   PCSetFromOptions_GAMG_AGG - process AGG-specific options (after the GAMG
   base-class options).  Note: each option's default is (re)set here before
   querying, so calling this resets any value set programmatically earlier.

  Input Parameter:
   . pc -
*/
#undef __FUNCT__
#define __FUNCT__ "PCSetFromOptions_GAMG_AGG"
PetscErrorCode PCSetFromOptions_GAMG_AGG( PC pc )
{
  PetscErrorCode ierr;
  PC_MG          *mg = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
  PC_GAMG_AGG    *pc_gamg_agg = (PC_GAMG_AGG*)pc_gamg->subctx;
  PetscBool      flag;

  PetscFunctionBegin;
  /* call base class */
  ierr = PCSetFromOptions_GAMG( pc ); CHKERRQ(ierr);

  ierr = PetscOptionsHead("GAMG-AGG options"); CHKERRQ(ierr);
  {
    /* -pc_gamg_agg_nsmooths */
    pc_gamg_agg->nsmooths = 0;
    ierr = PetscOptionsInt("-pc_gamg_agg_nsmooths",
                           "smoothing steps for smoothed aggregation, usually 1 (0)",
                           "PCGAMGSetNSmooths",
                           pc_gamg_agg->nsmooths,
                           &pc_gamg_agg->nsmooths,
                           &flag);
    CHKERRQ(ierr);

    /* -pc_gamg_sym_graph */
    pc_gamg_agg->sym_graph = PETSC_FALSE;
    ierr = PetscOptionsBool("-pc_gamg_sym_graph",
                            "Set for asymmetric matrices",
                            "PCGAMGSetSymGraph",
                            pc_gamg_agg->sym_graph,
                            &pc_gamg_agg->sym_graph,
                            &flag);
    CHKERRQ(ierr);

    /* -pc_gamg_square_graph */
    pc_gamg_agg->square_graph = PETSC_TRUE;
    ierr = PetscOptionsBool("-pc_gamg_square_graph",
                            "For faster coarsening and lower coarse grid complexity",
                            "PCGAMGSetSquareGraph",
                            pc_gamg_agg->square_graph,
                            &pc_gamg_agg->square_graph,
                            &flag);
    CHKERRQ(ierr);
  }
  ierr = PetscOptionsTail();CHKERRQ(ierr);

  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------- */
/*
   PCDestroy_AGG - free the AGG subcontext, then destroy the GAMG base class.

  Input Parameter:
   . pc -
*/
#undef __FUNCT__
#define __FUNCT__ "PCDestroy_AGG"
PetscErrorCode PCDestroy_AGG( PC pc )
{
  PetscErrorCode ierr;
  PC_MG          *mg = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
  PC_GAMG_AGG    *pc_gamg_agg = (PC_GAMG_AGG*)pc_gamg->subctx;

  PetscFunctionBegin;
  if( pc_gamg_agg ) {
    ierr = PetscFree(pc_gamg_agg);CHKERRQ(ierr);
    pc_gamg_agg = 0;
  }

  /* call base class */
  ierr = PCDestroy_GAMG( pc );CHKERRQ(ierr);

  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------- */
/*
   PCSetCoordinates_AGG
     - collective

   Builds the (near) null-space vectors for smoothed aggregation from node
   coordinates: constants for scalar problems, rigid-body modes (translations
   plus rotations) for 2D/3D displacement elasticity.

   Input Parameter:
   . pc - the preconditioner context
   . ndm - dimension of data (used for dof/vertex for Stokes)
   . a_nloc - number of vertices local
   . coords - [a_nloc][ndm] - interleaved coordinate data: {x_0, y_0, z_0, x_1, y_1, ...}
*/
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCSetCoordinates_AGG"
PetscErrorCode PCSetCoordinates_AGG( PC pc, PetscInt ndm, PetscInt a_nloc, PetscReal *coords )
{
  PC_MG          *mg = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
  PetscErrorCode ierr;
  PetscInt       arrsz,kk,ii,jj,nloc,ndatarows,ndf;
  Mat            mat = pc->pmat;
  /* MPI_Comm wcomm = ((PetscObject)pc)->comm; */

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  nloc = a_nloc;

  /* SA: null space vectors */
  ierr = MatGetBlockSize( mat, &ndf ); CHKERRQ( ierr ); /* this does not work for Stokes */
  if( coords && ndf==1 ) pc_gamg->data_cell_cols = 1; /* scalar w/ coords and SA (not needed) */
  else if( coords ) {
    if(ndm > ndf) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_LIB,"degrees of motion %d > block size %d",ndm,ndf);
    pc_gamg->data_cell_cols = (ndm==2 ? 3 : 6); /* displacement elasticity: 3 rigid-body modes in 2D, 6 in 3D */
    if(ndm != ndf) {
      if(pc_gamg->data_cell_cols != ndf) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_LIB,"Don't know how to create null space for ndm=%d, ndf=%d.  Use MatSetNearNullSpace.",ndm,ndf);
    }
  }
  else pc_gamg->data_cell_cols = ndf; /* no data, force SA with constant null space vectors */
  pc_gamg->data_cell_rows = ndatarows = ndf;
  assert(pc_gamg->data_cell_cols>0);
  arrsz = nloc*pc_gamg->data_cell_rows*pc_gamg->data_cell_cols;

  /* create data - syntactic sugar that should be refactored at some point */
  if (pc_gamg->data==0 || (pc_gamg->data_sz != arrsz)) {
    ierr = PetscFree( pc_gamg->data );  CHKERRQ(ierr);
    ierr = PetscMalloc((arrsz+1)*sizeof(PetscReal), &pc_gamg->data ); CHKERRQ(ierr);
  }
  /* copy data in - column oriented */
  for(kk=0;kk<nloc;kk++){
    const PetscInt M = nloc*pc_gamg->data_cell_rows; /* stride into data */
    PetscReal *data = &pc_gamg->data[kk*ndatarows]; /* start of cell */
    if( pc_gamg->data_cell_cols==1 ) *data = 1.0;  /* scalar: constant vector */
    else {
      /* translational modes (identity block per node) */
      for(ii=0;ii<ndatarows;ii++)
        for(jj=0;jj<ndatarows;jj++)
          if(ii==jj)data[ii*M + jj] = 1.0;
          else data[ii*M + jj] = 0.0;

      /* rotational modes */
      if( coords ) {
        if( ndm == 2 ){
          data += 2*M;
          data[0] = -coords[2*kk+1];
          data[1] =  coords[2*kk];
        }
        else {
          /* 3D: three rotation modes, columns 3..5; skew-symmetric cross-product form */
          data += 3*M;
          data[0] = 0.0;              data[M+0] =  coords[3*kk+2]; data[2*M+0] = -coords[3*kk+1];
          data[1] = -coords[3*kk+2];  data[M+1] = 0.0;             data[2*M+1] =  coords[3*kk];
          data[2] =  coords[3*kk+1];  data[M+2] = -coords[3*kk];   data[2*M+2] = 0.0;
        }
      }
    }
  }

  pc_gamg->data_sz = arrsz;

  PetscFunctionReturn(0);
}
EXTERN_C_END

/* Per-vertex MIS state encoded in a PetscInt: negative sentinels below, or a
   non-negative global id meaning "selected" (see IS_SELECTED). */
typedef PetscInt NState;
static const NState NOT_DONE=-2;
static const NState DELETED=-1;
static const NState REMOVED=-3;
#define IS_SELECTED(s) (s!=DELETED && s!=NOT_DONE && s!=REMOVED)

/* -------------------------------------------------------------------------- */
/*
   smoothAggs - greedy grab of with G1 (unsquared graph) -- AIJ specific
     - AGG-MG specific: clears singletons out of 'selected_2'

   Input Parameter:
   . Gmat_2 - global matrix of graph (data not defined)
   . Gmat_1 - base graph to grab with
   Input/Output Parameter:
   . aggs_2 - linked list of aggs with gids
*/
#undef __FUNCT__
#define __FUNCT__ "smoothAggs"
static PetscErrorCode smoothAggs( const Mat Gmat_2, /* base (squared) graph */
                                  const Mat Gmat_1, /* base graph */
                                  /* const IS selected_2, [nselected local] selected vertices */
                                  PetscCoarsenData *aggs_2 /* [nselected local] global ID of aggregate */
                                  )
{
  PetscErrorCode ierr;
  PetscBool      isMPI;
  Mat_SeqAIJ     *matA_1, *matB_1=0, *matA_2, *matB_2=0;
  MPI_Comm       wcomm = ((PetscObject)Gmat_2)->comm;
  PetscMPIInt    mype,npe;
  PetscInt       lid,*ii,*idx,ix,Iend,my0,kk,n,j;
  Mat_MPIAIJ     *mpimat_2 = 0, *mpimat_1=0;
  const PetscInt nloc = Gmat_2->rmap->n;
  PetscScalar    *cpcol_1_state,*cpcol_2_state,*cpcol_2_par_orig,*lid_parent_gid;
  PetscInt       *lid_cprowID_1;
  NState         *lid_state;
  Vec            ghost_par_orig2;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank( wcomm, &mype ); CHKERRQ(ierr);
  ierr = MPI_Comm_size( wcomm, &npe ); CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(Gmat_1,&my0,&Iend); CHKERRQ(ierr);

  /* dead debugging branch: dump Gmat_2 in MATLAB format when enabled by hand */
  if( PETSC_FALSE ) {
    PetscViewer viewer; char fname[32]; static int llev=0;
    sprintf(fname,"Gmat2_%d.m",llev++);
    PetscViewerASCIIOpen(wcomm,fname,&viewer);
    ierr = PetscViewerSetFormat( viewer, PETSC_VIEWER_ASCII_MATLAB); CHKERRQ(ierr);
    ierr = MatView(Gmat_2, viewer ); CHKERRQ(ierr);
    ierr = PetscViewerDestroy( &viewer );
  }

  /* get submatrices */
  ierr = PetscObjectTypeCompare( (PetscObject)Gmat_1, MATMPIAIJ, &isMPI ); CHKERRQ(ierr);
  if(isMPI) {
    /* grab matrix objects */
    mpimat_2 = (Mat_MPIAIJ*)Gmat_2->data;
    mpimat_1 = (Mat_MPIAIJ*)Gmat_1->data;
    matA_1 = (Mat_SeqAIJ*)mpimat_1->A->data;
    matB_1 = (Mat_SeqAIJ*)mpimat_1->B->data;
    matA_2 = (Mat_SeqAIJ*)mpimat_2->A->data;
    matB_2 = (Mat_SeqAIJ*)mpimat_2->B->data;

    /* force compressed row storage for B matrix in AuxMat */
    matB_1->compressedrow.check = PETSC_TRUE;
    ierr = MatCheckCompressedRow(mpimat_1->B,&matB_1->compressedrow,matB_1->i,Gmat_1->rmap->n,-1.0);
    CHKERRQ(ierr);

    /* lid_cprowID_1[lid]: compressed-row index of local row 'lid' in B_1, or -1 if no off-diag entries */
    ierr = PetscMalloc( nloc*sizeof(PetscInt), &lid_cprowID_1 ); CHKERRQ(ierr);
    for( lid = 0 ; lid < nloc ; lid++ ) lid_cprowID_1[lid] = -1;
    for (ix=0; ix<matB_1->compressedrow.nrows; ix++) {
      PetscInt lid = matB_1->compressedrow.rindex[ix];
      lid_cprowID_1[lid] = ix;
    }
  }
  else {
    matA_1 = (Mat_SeqAIJ*)Gmat_1->data;
    matA_2 = (Mat_SeqAIJ*)Gmat_2->data;
    lid_cprowID_1 = PETSC_NULL;
  }
  assert( matA_1 && !matA_1->compressedrow.use );
  assert( matB_1==0 || matB_1->compressedrow.use );
  assert( matA_2 && !matA_2->compressedrow.use );
  assert( matB_2==0 || matB_2->compressedrow.use );

  /* get state of locals and selected gid for deleted */
  ierr = PetscMalloc( nloc*sizeof(NState), &lid_state ); CHKERRQ(ierr);
  ierr = PetscMalloc( nloc*sizeof(PetscScalar), &lid_parent_gid ); CHKERRQ(ierr);
  for( lid = 0 ; lid < nloc ; lid++ ) {
    lid_parent_gid[lid] = -1.0;
    lid_state[lid] = DELETED;
  }

  /* set lid_state: a vertex that heads a non-empty aggregate list is "selected"
     (its state is its own global id); everything else stays DELETED */
  for( lid = 0 ; lid < nloc ; lid++ ) {
    PetscCDPos pos;
    ierr = PetscCDGetHeadPos(aggs_2,lid,&pos); CHKERRQ(ierr);
    if( pos ) {
      PetscInt gid1;
      ierr = PetscLLNGetID( pos, &gid1 ); CHKERRQ(ierr); assert(gid1==lid+my0);
      lid_state[lid] = gid1;
    }
  }

  /* map local to selected local, DELETED means a ghost owns it */
  for(lid=kk=0;lid<nloc;lid++){
    NState state = lid_state[lid];
    if( IS_SELECTED(state) ){
      PetscCDPos pos;
      ierr = PetscCDGetHeadPos(aggs_2,lid,&pos); CHKERRQ(ierr);
      while(pos){
        PetscInt gid1;
        ierr = PetscLLNGetID( pos, &gid1 ); CHKERRQ(ierr);
        ierr = PetscCDGetNextPos(aggs_2,lid,&pos); CHKERRQ(ierr);
        if( gid1 >= my0 && gid1 < Iend ){
          lid_parent_gid[gid1-my0] = (PetscScalar)(lid + my0);
        }
      }
    }
  }
  /* get 'cpcol_1/2_state' & cpcol_2_par_orig - uses mpimat_1/2->lvec for temp space */
  if (isMPI) {
    Vec tempVec;
    /* get 'cpcol_1_state' */
    ierr = MatGetVecs( Gmat_1, &tempVec, 0 ); CHKERRQ(ierr);
    for(kk=0,j=my0;kk<nloc;kk++,j++){
      PetscScalar v = (PetscScalar)lid_state[kk];
      ierr = VecSetValues( tempVec, 1, &j, &v, INSERT_VALUES ); CHKERRQ(ierr);
    }
    ierr = VecAssemblyBegin( tempVec ); CHKERRQ(ierr);
    ierr = VecAssemblyEnd( tempVec ); CHKERRQ(ierr);
    ierr = VecScatterBegin(mpimat_1->Mvctx,tempVec, mpimat_1->lvec,INSERT_VALUES,SCATTER_FORWARD);
    CHKERRQ(ierr);
    ierr = VecScatterEnd(mpimat_1->Mvctx,tempVec, mpimat_1->lvec,INSERT_VALUES,SCATTER_FORWARD);
    CHKERRQ(ierr);
    ierr = VecGetArray( mpimat_1->lvec, &cpcol_1_state ); CHKERRQ(ierr);
    /* get 'cpcol_2_state' - same local states scattered through Gmat_2's ghost map */
    ierr = VecScatterBegin(mpimat_2->Mvctx,tempVec, mpimat_2->lvec,INSERT_VALUES,SCATTER_FORWARD);
    CHKERRQ(ierr);
    ierr = VecScatterEnd(mpimat_2->Mvctx,tempVec, mpimat_2->lvec,INSERT_VALUES,SCATTER_FORWARD);
    CHKERRQ(ierr);
    ierr = VecGetArray( mpimat_2->lvec, &cpcol_2_state ); CHKERRQ(ierr);
    /* get 'cpcol_2_par_orig' - the original (pre-smoothing) parent of each ghost */
    for(kk=0,j=my0;kk<nloc;kk++,j++){
      PetscScalar v = (PetscScalar)lid_parent_gid[kk];
      ierr = VecSetValues( tempVec, 1, &j, &v, INSERT_VALUES ); CHKERRQ(ierr);
    }
    ierr = VecAssemblyBegin( tempVec ); CHKERRQ(ierr);
    ierr = VecAssemblyEnd( tempVec ); CHKERRQ(ierr);
    ierr = VecDuplicate( mpimat_2->lvec, &ghost_par_orig2 ); CHKERRQ(ierr);
    ierr = VecScatterBegin(mpimat_2->Mvctx,tempVec, ghost_par_orig2,INSERT_VALUES,SCATTER_FORWARD);
    CHKERRQ(ierr);
    ierr = VecScatterEnd(mpimat_2->Mvctx,tempVec, ghost_par_orig2,INSERT_VALUES,SCATTER_FORWARD);
    CHKERRQ(ierr);
    ierr = VecGetArray( ghost_par_orig2, &cpcol_2_par_orig ); CHKERRQ(ierr);

    ierr = VecDestroy( &tempVec ); CHKERRQ(ierr);
  } /* ismpi */

  /* doit: each selected vertex greedily steals its G_1 neighbors from whatever
     aggregate they landed in during the (squared-graph) MIS */
  for(lid=0;lid<nloc;lid++){
    NState state = lid_state[lid];
    if( IS_SELECTED(state) ) {
      /* steal locals */
      ii = matA_1->i; n = ii[lid+1] - ii[lid];
      idx = matA_1->j + ii[lid];
      for (j=0; j<n; j++) {
        PetscInt lidj = idx[j], sgid;
        NState statej = lid_state[lidj];
        if (statej==DELETED && (sgid=(PetscInt)PetscRealPart(lid_parent_gid[lidj])) != lid+my0) { /* steal local */
          lid_parent_gid[lidj] = (PetscScalar)(lid+my0); /* send this if sgid is not local */
          if( sgid >= my0 && sgid < Iend ){ /* I'm stealing this local from a local sgid */
            PetscInt hav=0,slid=sgid-my0,gidj=lidj+my0;
            PetscCDPos pos,last=PETSC_NULL;
            /* looking for local from local so id_llist_2 works */
            ierr = PetscCDGetHeadPos(aggs_2,slid,&pos); CHKERRQ(ierr);
            while(pos){
              PetscInt gid;
              ierr = PetscLLNGetID( pos, &gid ); CHKERRQ(ierr);
              if( gid == gidj ) {
                /* move node 'gidj' from aggregate 'slid' into aggregate 'lid' */
                assert(last);
                ierr = PetscCDRemoveNextNode( aggs_2, slid, last ); CHKERRQ(ierr);
                ierr = PetscCDAppendNode( aggs_2, lid, pos ); CHKERRQ(ierr);
                hav = 1;
                break;
              }
              else last = pos;
              ierr = PetscCDGetNextPos(aggs_2,slid,&pos); CHKERRQ(ierr);
            }
            if(hav!=1){
              if(hav==0)SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"failed to find adj in 'selected' lists - structurally unsymmetric matrix");
              SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"found node %d times???",hav);
            }
          }
          else{ /* I'm stealing this local, owned by a ghost */
            assert(sgid==-1);
            ierr = PetscCDAppendID( aggs_2, lid, lidj+my0 ); CHKERRQ(ierr);
          }
        }
      } /* local neighbors */
    }
    else if( state == DELETED && lid_cprowID_1 ) {
      PetscInt sgidold = (PetscInt)PetscRealPart(lid_parent_gid[lid]);
      /* see if I have a selected ghost neighbor that will steal me */
      if( (ix=lid_cprowID_1[lid]) != -1 ){
        ii = matB_1->compressedrow.i; n = ii[ix+1] - ii[ix];
        idx = matB_1->j + ii[ix];
        for( j=0 ; j<n ; j++ ) {
          PetscInt cpid = idx[j];
          NState statej = (NState)PetscRealPart(cpcol_1_state[cpid]);
          if( IS_SELECTED(statej) && sgidold != (PetscInt)statej ) { /* ghost will steal this, remove from my list */
            lid_parent_gid[lid] = (PetscScalar)statej; /* send who selected */
            if( sgidold>=my0 && sgidold<Iend ) { /* this was mine */
              PetscInt hav=0,oldslidj=sgidold-my0;
              PetscCDPos pos,last=PETSC_NULL;
              /* remove from 'oldslidj' list */
              ierr = PetscCDGetHeadPos(aggs_2,oldslidj,&pos); CHKERRQ(ierr);
              while( pos ) {
                PetscInt gid;
                ierr = PetscLLNGetID( pos, &gid ); CHKERRQ(ierr);
                if( lid+my0 == gid ) {
                  /* id_llist_2[lastid] = id_llist_2[flid]; /\* remove lid from oldslidj list *\/ */
                  assert(last);
                  ierr = PetscCDRemoveNextNode( aggs_2, oldslidj, last ); CHKERRQ(ierr);
                  /* ghost (PetscScalar)statej will add this later */
                  hav = 1;
                  break;
                }
                else last = pos;
                ierr = PetscCDGetNextPos(aggs_2,oldslidj,&pos); CHKERRQ(ierr);
              }
              if(hav!=1){
                if(hav==0)SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"failed to find adj in 'selected' lists - structurally unsymmetric matrix");
                SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"found node %d times???",hav);
              }
            }
            else {
              /* ghosts remove this later */
            }
          }
        }
      }
    } /* selected/deleted */
  } /* node loop */

  /* parallel fix-up: reconcile aggregate membership with what the ghost owners decided */
  if( isMPI ) {
    PetscScalar   *cpcol_2_parent,*cpcol_2_gid;
    Vec           tempVec,ghostgids2,ghostparents2;
    PetscInt      cpid,nghost_2;
    GAMGHashTable gid_cpid;

    ierr = VecGetSize( mpimat_2->lvec, &nghost_2 ); CHKERRQ(ierr);
    ierr = MatGetVecs( Gmat_2, &tempVec, 0 ); CHKERRQ(ierr);

    /* get 'cpcol_2_parent' - post-smoothing parent of each ghost */
    for(kk=0,j=my0;kk<nloc;kk++,j++){
      ierr = VecSetValues( tempVec, 1, &j, &lid_parent_gid[kk], INSERT_VALUES ); CHKERRQ(ierr);
    }
    ierr = VecAssemblyBegin( tempVec ); CHKERRQ(ierr);
    ierr = VecAssemblyEnd( tempVec ); CHKERRQ(ierr);
    ierr = VecDuplicate( mpimat_2->lvec, &ghostparents2 ); CHKERRQ(ierr);
    ierr = VecScatterBegin(mpimat_2->Mvctx,tempVec, ghostparents2,INSERT_VALUES,SCATTER_FORWARD);
    CHKERRQ(ierr);
    ierr = VecScatterEnd(mpimat_2->Mvctx,tempVec, ghostparents2,INSERT_VALUES,SCATTER_FORWARD);
    CHKERRQ(ierr);
    ierr = VecGetArray( ghostparents2, &cpcol_2_parent ); CHKERRQ(ierr);

    /* get 'cpcol_2_gid' - global id of each ghost */
    for(kk=0,j=my0;kk<nloc;kk++,j++){
      PetscScalar v = (PetscScalar)j;
      ierr = VecSetValues( tempVec, 1, &j, &v, INSERT_VALUES ); CHKERRQ(ierr);
    }
    ierr = VecAssemblyBegin( tempVec ); CHKERRQ(ierr);
    ierr = VecAssemblyEnd( tempVec ); CHKERRQ(ierr);
    ierr = VecDuplicate( mpimat_2->lvec, &ghostgids2 ); CHKERRQ(ierr);
    ierr = VecScatterBegin(mpimat_2->Mvctx,tempVec, ghostgids2,INSERT_VALUES,SCATTER_FORWARD);
    CHKERRQ(ierr);
    ierr = VecScatterEnd(mpimat_2->Mvctx,tempVec, ghostgids2,INSERT_VALUES,SCATTER_FORWARD);
    CHKERRQ(ierr);
    ierr = VecGetArray( ghostgids2, &cpcol_2_gid ); CHKERRQ(ierr);

    ierr = VecDestroy( &tempVec ); CHKERRQ(ierr);

    /* look for deleted ghosts and add to table */
    ierr = GAMGTableCreate( 2*nghost_2, &gid_cpid ); CHKERRQ(ierr);
    for( cpid = 0 ; cpid < nghost_2 ; cpid++ ) {
      NState state = (NState)PetscRealPart(cpcol_2_state[cpid]);
      if( state==DELETED ) {
        PetscInt sgid_new = (PetscInt)PetscRealPart(cpcol_2_parent[cpid]);
        PetscInt sgid_old = (PetscInt)PetscRealPart(cpcol_2_par_orig[cpid]);
        if( sgid_old == -1 && sgid_new != -1 ) { /* this ghost got adopted during smoothing */
          PetscInt gid = (PetscInt)PetscRealPart(cpcol_2_gid[cpid]);
          ierr = GAMGTableAdd( &gid_cpid, gid, cpid ); CHKERRQ(ierr);
        }
      }
    }

    /* look for deleted ghosts and see if they moved - remove it */
    for(lid=0;lid<nloc;lid++){
      NState state = lid_state[lid];
      if( IS_SELECTED(state) ){
        PetscCDPos pos,last=PETSC_NULL;
        /* look for deleted ghosts and see if they moved */
        ierr = PetscCDGetHeadPos(aggs_2,lid,&pos); CHKERRQ(ierr);
        while(pos){
          PetscInt gid;
          ierr = PetscLLNGetID( pos, &gid ); CHKERRQ(ierr);
          if( gid < my0 || gid >= Iend ) {
            ierr = GAMGTableFind( &gid_cpid, gid, &cpid ); CHKERRQ(ierr);
            if( cpid != -1 ) {
              /* a moved ghost - drop it from my aggregate; its new owner keeps it */
              /* id_llist_2[lastid] = id_llist_2[flid]; /\* remove 'flid' from list *\/ */
              ierr = PetscCDRemoveNextNode( aggs_2, lid, last ); CHKERRQ(ierr);
            }
            else last = pos;
          }
          else last = pos;
          ierr = PetscCDGetNextPos(aggs_2,lid,&pos); CHKERRQ(ierr);
        } /* loop over list of deleted */
      } /* selected */
    }
    ierr = GAMGTableDestroy( &gid_cpid ); CHKERRQ(ierr);

    /* look at ghosts, see if they changed - and it */
    for( cpid = 0 ; cpid < nghost_2 ; cpid++ ){
      PetscInt sgid_new = (PetscInt)PetscRealPart(cpcol_2_parent[cpid]);
      if( sgid_new >= my0 && sgid_new < Iend ) { /* this is mine */
        PetscInt gid = (PetscInt)PetscRealPart(cpcol_2_gid[cpid]);
        PetscInt slid_new=sgid_new-my0,hav=0;
        PetscCDPos pos;
        /* search for this gid to see if I have it */
        ierr = PetscCDGetHeadPos(aggs_2,slid_new,&pos); CHKERRQ(ierr);
        while(pos){
          PetscInt gidj;
          ierr = PetscLLNGetID( pos, &gidj ); CHKERRQ(ierr);
          ierr = PetscCDGetNextPos(aggs_2,slid_new,&pos); CHKERRQ(ierr);
          if( gidj == gid ) { hav = 1; break; }
        }
        if( hav != 1 ){
          /* insert 'flidj' into head of llist */
          ierr = PetscCDAppendID( aggs_2, slid_new, gid ); CHKERRQ(ierr);
        }
      }
    }

    ierr = VecRestoreArray( mpimat_1->lvec, &cpcol_1_state ); CHKERRQ(ierr);
    ierr = VecRestoreArray( mpimat_2->lvec, &cpcol_2_state ); CHKERRQ(ierr);
    ierr = VecRestoreArray( ghostparents2, &cpcol_2_parent ); CHKERRQ(ierr);
    ierr = VecRestoreArray( ghostgids2, &cpcol_2_gid ); CHKERRQ(ierr);
    ierr = PetscFree( lid_cprowID_1 ); CHKERRQ(ierr);
    ierr = VecDestroy( &ghostgids2 ); CHKERRQ(ierr);
    ierr = VecDestroy( &ghostparents2 ); CHKERRQ(ierr);
    ierr = VecDestroy( &ghost_par_orig2 ); CHKERRQ(ierr);
  }

  ierr = PetscFree( lid_parent_gid ); CHKERRQ(ierr);
  ierr = PetscFree( lid_state ); CHKERRQ(ierr);

  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------- */
/*
   PCSetData_AGG - called if data is not set with PCSetCoordinates.
      Looks in Mat for near null space.
      Does not work for Stokes

  Input Parameter:
   . pc -
   . a_A - matrix to get (near) null space out of.
*/
#undef __FUNCT__
#define __FUNCT__ "PCSetData_AGG"
PetscErrorCode PCSetData_AGG( PC pc, Mat a_A )
{
  PetscErrorCode ierr;
  PC_MG          *mg = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
  MatNullSpace   mnull;

  PetscFunctionBegin;
  ierr = MatGetNearNullSpace( a_A, &mnull ); CHKERRQ(ierr);
  if( !mnull ) {
    /* no near null space attached: fall back to constant vectors via PCSetCoordinates_AGG */
    PetscInt bs,NN,MM;
    ierr = MatGetBlockSize( a_A, &bs ); CHKERRQ( ierr );
    ierr = MatGetLocalSize( a_A, &MM, &NN ); CHKERRQ( ierr ); assert(MM%bs==0);
    PetscPrintf(((PetscObject)a_A)->comm,"[%d]%s bs=%d MM=%d\n",0,__FUNCT__,bs,MM);
    ierr = PCSetCoordinates_AGG( pc, bs, MM/bs, PETSC_NULL ); CHKERRQ(ierr);
  }
  else {
    /* copy the null-space vectors (plus an optional constant vector) into
       pc_gamg->data, column oriented with stride mlocal */
    PetscReal *nullvec;
    PetscBool has_const;
    PetscInt i,j,mlocal,nvec,bs;
    const Vec *vecs; const PetscScalar *v;
    ierr = MatGetLocalSize(a_A,&mlocal,PETSC_NULL);CHKERRQ(ierr);
    ierr = MatNullSpaceGetVecs( mnull, &has_const, &nvec, &vecs ); CHKERRQ(ierr);
    ierr = PetscMalloc((nvec+!!has_const)*mlocal*sizeof *nullvec,&nullvec);CHKERRQ(ierr);
    if (has_const) for (i=0; i<mlocal; i++) nullvec[i] = 1.0;
    for (i=0; i<nvec; i++) {
      ierr = VecGetArrayRead(vecs[i],&v);CHKERRQ(ierr);
      for (j=0; j<mlocal; j++) nullvec[(i+!!has_const)*mlocal + j] = PetscRealPart(v[j]);
      ierr = VecRestoreArrayRead(vecs[i],&v);CHKERRQ(ierr);
    }
    pc_gamg->data = nullvec;
    pc_gamg->data_cell_cols = (nvec+!!has_const);
    ierr = MatGetBlockSize( a_A, &bs ); CHKERRQ( ierr ); /* this does not work for Stokes */
    pc_gamg->data_cell_rows = bs;
  }
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------- */
/*
   formProl0 - form the tentative prolongator P0 via a per-aggregate QR of the
   fine-grid (near) null-space vectors: Q becomes the column block of P0, R
   becomes the coarse-grid data for the next level.

   Input Parameter:
   . agg_llists - list of arrays with aggregates
   . bs - block size
   . nSAvec - column bs of new P
   . my0crs - global index of start of locals
   . data_stride - bs*(nloc nodes + ghost nodes)
   . data_in[data_stride*nSAvec] - local data on fine grid
   . flid_fgid[data_stride/bs] - map local to global IDs, includes ghosts in 'locals_llist'
   Output Parameter:
   . a_data_out - in with fine grid data (w/ghosts), out with coarse grid data
   . a_Prol - prolongation operator
*/
#undef __FUNCT__
#define __FUNCT__ "formProl0"
static PetscErrorCode formProl0(const PetscCoarsenData *agg_llists, /* list from selected vertices of aggregate unselected vertices */
                                const PetscInt bs,          /* (row) block size */
                                const PetscInt nSAvec,      /* column bs */
                                const PetscInt my0crs,      /* global index of start of locals */
                                const PetscInt data_stride, /* (nloc+nghost)*bs */
                                PetscReal data_in[],        /* [data_stride][nSAvec] */
                                const PetscInt flid_fgid[], /* [data_stride/bs] */
                                PetscReal **a_data_out,
                                Mat a_Prol /* prolongation operator (output)*/
                                )
{
  PetscErrorCode ierr;
  PetscInt       Istart,my0,Iend,nloc,clid,flid,aggID,kk,jj,ii,mm,ndone,nSelected,minsz,nghosts,out_data_stride;
  MPI_Comm       wcomm = ((PetscObject)a_Prol)->comm;
  PetscMPIInt    mype, npe;
  PetscReal      *out_data;
  PetscCDPos     pos;
  GAMGHashTable  fgid_flid;

/* #define OUT_AGGS */
#ifdef OUT_AGGS
  static PetscInt llev = 0; char fname[32]; FILE *file = PETSC_NULL; PetscInt pM;
#endif

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(wcomm,&mype);CHKERRQ(ierr);
  ierr = MPI_Comm_size(wcomm,&npe);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange( a_Prol, &Istart, &Iend ); CHKERRQ(ierr);
  nloc = (Iend-Istart)/bs; my0 = Istart/bs; assert((Iend-Istart)%bs==0);
  Iend /= bs;
  nghosts = data_stride/bs - nloc;

  /* hash table: ghost global id -> local (ghost) index */
  ierr = GAMGTableCreate( 2*nghosts, &fgid_flid ); CHKERRQ(ierr);
  for(kk=0;kk<nghosts;kk++) {
    ierr = GAMGTableAdd( &fgid_flid, flid_fgid[nloc+kk], nloc+kk ); CHKERRQ(ierr);
  }

#ifdef OUT_AGGS
  sprintf(fname,"aggs_%d_%d.m",llev++,mype);
  if(llev==1) {
    file = fopen(fname,"w");
  }
  MatGetSize( a_Prol, &pM, &jj );
#endif

  /* count selected -- same as number of cols of P */
  for(nSelected=mm=0;mm<nloc;mm++) {
    PetscBool ise;
    ierr = PetscCDEmptyAt( agg_llists, mm, &ise ); CHKERRQ(ierr);
    if( !ise ) nSelected++;
  }
  ierr = MatGetOwnershipRangeColumn( a_Prol, &ii, &jj ); CHKERRQ(ierr);
  assert((ii/nSAvec)==my0crs); assert(nSelected==(jj-ii)/nSAvec);

  /* alloc space for coarse point data (output); 1.e300 marks "not yet written"
     (checked by the asserts when R is stored below) */
  out_data_stride = nSelected*nSAvec;
  ierr = PetscMalloc( out_data_stride*nSAvec*sizeof(PetscReal), &out_data ); CHKERRQ(ierr);
  for(ii=0;ii<out_data_stride*nSAvec;ii++) {
    out_data[ii]=1.e300;
  }
  *a_data_out = out_data; /* output - stride nSelected*nSAvec */

  /* find points and set prolongation */
  minsz = 100;
  ndone = 0;
  for( mm = clid = 0 ; mm < nloc ; mm++ ){
    ierr = PetscCDSizeAt( agg_llists, mm, &jj ); CHKERRQ(ierr);
    if( jj > 0 ) {
      const PetscInt lid = mm, cgid = my0crs + clid;
      PetscInt cids[100]; /* max bs */
      PetscBLASInt asz=jj,M=asz*bs,N=nSAvec,INFO;
      PetscBLASInt Mdata=M+((N-M>0)?N-M:0),LDA=Mdata,LWORK=N*bs; /* pad rows so Mdata >= N for LAPACK */
      PetscScalar  *qqc,*qqr,*TAU,*WORK;
      PetscInt     *fids;
      PetscReal    *data;
      /* count agg */
      if( asz<minsz ) minsz = asz;

      /* get block */
      ierr = PetscMalloc( (Mdata*N)*sizeof(PetscScalar), &qqc ); CHKERRQ(ierr);
      ierr = PetscMalloc( (M*N)*sizeof(PetscScalar), &qqr ); CHKERRQ(ierr);
      ierr = PetscMalloc( N*sizeof(PetscScalar), &TAU ); CHKERRQ(ierr);
      ierr = PetscMalloc( LWORK*sizeof(PetscScalar), &WORK ); CHKERRQ(ierr);
      ierr = PetscMalloc( M*sizeof(PetscInt), &fids ); CHKERRQ(ierr);

      aggID = 0;
      ierr = PetscCDGetHeadPos(agg_llists,lid,&pos); CHKERRQ(ierr);
      while(pos){
        PetscInt gid1;
        ierr = PetscLLNGetID( pos, &gid1 ); CHKERRQ(ierr);
        ierr = PetscCDGetNextPos(agg_llists,lid,&pos); CHKERRQ(ierr);

        if( gid1 >= my0 && gid1 < Iend ) flid = gid1 - my0;
        else {
          ierr = GAMGTableFind( &fgid_flid, gid1, &flid ); CHKERRQ(ierr);
          assert(flid>=0);
        }
        /* copy in B_i matrix - column oriented */
        data = &data_in[flid*bs];
        for( kk = ii = 0; ii < bs ; ii++ ) {
          for( jj = 0; jj < N ; jj++ ) {
            PetscReal d = data[jj*data_stride + ii];
            qqc[jj*Mdata + aggID*bs + ii] = d;
          }
        }
#ifdef OUT_AGGS
        if(llev==1) {
          char str[] = "plot(%e,%e,'r*'), hold on,\n", col[] = "rgbkmc", sim[] = "*os+h>d<vx^";
          PetscInt MM,pi,pj;
          str[12] = col[(clid+17*mype)%6]; str[13] = sim[(clid+17*mype)%11];
          MM = (PetscInt)(PetscSqrtReal((PetscReal)pM));
          pj = gid1/MM; pi = gid1%MM;
          fprintf(file,str,(double)pi,(double)pj);
          /* fprintf(file,str,data[2*data_stride+1],-data[2*data_stride]); */
        }
#endif
        /* set fine IDs */
        for(kk=0;kk<bs;kk++) fids[aggID*bs + kk] = flid_fgid[flid]*bs + kk;

        aggID++;
      }

      /* pad with zeros */
      for( ii = asz*bs; ii < Mdata ; ii++ ) {
        for( jj = 0; jj < N ; jj++, kk++ ) {
          qqc[jj*Mdata + ii] = .0;
        }
      }

      ndone += aggID;
      /* QR */
      LAPACKgeqrf_( &Mdata, &N, qqc, &LDA, TAU, WORK, &LWORK, &INFO );
      if( INFO != 0 ) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"xGEQRS error");
      /* get R - column oriented - output B_{i+1} */
      {
        PetscReal *data = &out_data[clid*nSAvec];
        for( jj = 0; jj < nSAvec ; jj++ ) {
          for( ii = 0; ii < nSAvec ; ii++ ) {
            assert(data[jj*out_data_stride + ii] == 1.e300);
            if( ii <= jj ) data[jj*out_data_stride + ii] = PetscRealPart(qqc[jj*Mdata + ii]);
            else data[jj*out_data_stride + ii] = 0.;
          }
        }
      }

      /* get Q - row oriented */
      LAPACKungqr_( &Mdata, &N, &N, qqc, &LDA, TAU, WORK, &LWORK, &INFO );
      if( INFO != 0 ) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"xORGQR error arg %d",-INFO);

      for( ii = 0 ; ii < M ; ii++ ){
        for( jj = 0 ; jj < N ; jj++ ) {
          qqr[N*ii + jj] = qqc[jj*Mdata + ii];
        }
      }

      /* add diagonal block of P0 */
      for(kk=0;kk<N;kk++) {
        cids[kk] = N*cgid + kk; /* global col IDs in P0 */
      }
      ierr = MatSetValues(a_Prol,M,fids,N,cids,qqr,INSERT_VALUES); CHKERRQ(ierr);

      ierr = PetscFree( qqc ); CHKERRQ(ierr);
      ierr = PetscFree( qqr ); CHKERRQ(ierr);
      ierr = PetscFree( TAU ); CHKERRQ(ierr);
      ierr = PetscFree( WORK ); CHKERRQ(ierr);
      ierr = PetscFree( fids ); CHKERRQ(ierr);
      clid++;
    } /* coarse agg */
  } /* for all fine nodes */
  ierr = MatAssemblyBegin(a_Prol,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(a_Prol,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

  /* ierr = MPI_Allreduce( &ndone, &ii, 1, MPIU_INT, MPIU_SUM, wcomm ); */
  /* MatGetSize( a_Prol, &kk, &jj ); */
  /* ierr = MPI_Allreduce( &minsz, &jj, 1, MPIU_INT, MPIU_MIN, wcomm ); */
  /* PetscPrintf(wcomm," **** [%d]%s %d total done, %d nodes (%d local done), min agg. size = %d\n",mype,__FUNCT__,ii,kk/bs,ndone,jj); */

#ifdef OUT_AGGS
  if(llev==1) fclose(file);
#endif
  ierr = GAMGTableDestroy( &fgid_flid ); CHKERRQ(ierr);

  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------- */
/*
   PCGAMGgraph_AGG

  Input Parameter:
   . pc - this
   . Amat - matrix on this fine level
  Output Parameter:
   . 
a_Gmat - 946 */ 947 #undef __FUNCT__ 948 #define __FUNCT__ "PCGAMGgraph_AGG" 949 PetscErrorCode PCGAMGgraph_AGG( PC pc, 950 const Mat Amat, 951 Mat *a_Gmat 952 ) 953 { 954 PetscErrorCode ierr; 955 PC_MG *mg = (PC_MG*)pc->data; 956 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 957 const PetscInt verbose = pc_gamg->verbose; 958 const PetscReal vfilter = pc_gamg->threshold; 959 PC_GAMG_AGG *pc_gamg_agg = (PC_GAMG_AGG*)pc_gamg->subctx; 960 PetscMPIInt mype,npe; 961 Mat Gmat; 962 MPI_Comm wcomm = ((PetscObject)Amat)->comm; 963 PetscBool set,flg,symm; 964 965 PetscFunctionBegin; 966 #if defined PETSC_USE_LOG 967 ierr = PetscLogEventBegin(PC_GAMGGgraph_AGG,0,0,0,0);CHKERRQ(ierr); 968 #endif 969 ierr = MPI_Comm_rank( wcomm, &mype); CHKERRQ(ierr); 970 ierr = MPI_Comm_size( wcomm, &npe); CHKERRQ(ierr); 971 972 ierr = MatIsSymmetricKnown(Amat, &set, &flg); CHKERRQ(ierr); 973 symm = (PetscBool)(pc_gamg_agg->sym_graph || !(set && flg)); 974 975 ierr = PCGAMGCreateGraph( Amat, &Gmat ); CHKERRQ( ierr ); 976 ierr = PCGAMGFilterGraph( &Gmat, vfilter, symm, verbose ); CHKERRQ( ierr ); 977 978 *a_Gmat = Gmat; 979 980 #if defined PETSC_USE_LOG 981 ierr = PetscLogEventEnd(PC_GAMGGgraph_AGG,0,0,0,0);CHKERRQ(ierr); 982 #endif 983 PetscFunctionReturn(0); 984 } 985 986 /* -------------------------------------------------------------------------- */ 987 /* 988 PCGAMGCoarsen_AGG 989 990 Input Parameter: 991 . a_pc - this 992 Input/Output Parameter: 993 . a_Gmat1 - graph on this fine level - coarsening can change this (squares it) 994 Output Parameter: 995 . 
agg_lists - list of aggregates
*/
#undef __FUNCT__
#define __FUNCT__ "PCGAMGCoarsen_AGG"
/*
   Coarsen one level: optionally square the graph (G^T G) to get longer-range
   connections, compute a MIS-type coarsening on a randomized ordering, and
   return the aggregate lists.  When the graph was squared, the aggregates are
   smoothed back against the original (unsquared) graph and *a_Gmat1 is
   replaced by the squared graph.
*/
PetscErrorCode PCGAMGCoarsen_AGG( PC a_pc,
                                  Mat *a_Gmat1,
                                  PetscCoarsenData **agg_lists
                                  )
{
  PetscErrorCode ierr;
  PC_MG          *mg = (PC_MG*)a_pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
  PC_GAMG_AGG    *pc_gamg_agg = (PC_GAMG_AGG*)pc_gamg->subctx;
  Mat            mat,Gmat2, Gmat1 = *a_Gmat1;  /* squared graph */
  IS             perm;
  PetscInt       Ii,nloc,bs,n,m;
  PetscInt       *permute;
  PetscBool      *bIndexSet;
  MatCoarsen     crs;
  MPI_Comm       wcomm = ((PetscObject)Gmat1)->comm;
  PetscMPIInt    mype,npe;

  PetscFunctionBegin;
#if defined PETSC_USE_LOG
  ierr = PetscLogEventBegin(PC_GAMGCoarsen_AGG,0,0,0,0);CHKERRQ(ierr);
#endif
  ierr = MPI_Comm_rank( wcomm, &mype); CHKERRQ(ierr);
  ierr = MPI_Comm_size( wcomm, &npe); CHKERRQ(ierr);
  ierr = MatGetLocalSize( Gmat1, &n, &m ); CHKERRQ(ierr);
  /* graphs are always scalar (blocks were collapsed by PCGAMGCreateGraph) */
  ierr = MatGetBlockSize( Gmat1, &bs ); CHKERRQ(ierr); assert(bs==1);
  nloc = n/bs;

  /* square the graph for second-neighbor connectivity, if requested */
  if( pc_gamg_agg->square_graph ) {
    ierr = MatTransposeMatMult( Gmat1, Gmat1, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &Gmat2 );
    CHKERRQ(ierr);
  }
  else Gmat2 = Gmat1;

  /* get MIS aggs */
  /* randomize the vertex ordering so the greedy coarsener is not biased by
     the natural ordering */
  ierr = PetscMalloc( nloc*sizeof(PetscInt), &permute ); CHKERRQ(ierr);
  ierr = PetscMalloc( nloc*sizeof(PetscBool), &bIndexSet ); CHKERRQ(ierr);
  for ( Ii = 0; Ii < nloc ; Ii++ ){
    bIndexSet[Ii] = PETSC_FALSE;
    permute[Ii] = Ii;
  }
  srand(1); /* make deterministic */
  /* NOTE(review): one-pass swap with the bIndexSet guard is not a uniform
     Fisher-Yates shuffle -- appears intentional (cheap, deterministic
     scrambling is all that is needed here) */
  for ( Ii = 0; Ii < nloc ; Ii++ ) {
    PetscInt iSwapIndex = rand()%nloc;
    if (!bIndexSet[iSwapIndex] && iSwapIndex != Ii) {
      PetscInt iTemp = permute[iSwapIndex];
      permute[iSwapIndex] = permute[Ii];
      permute[Ii] = iTemp;
      bIndexSet[iSwapIndex] = PETSC_TRUE;
    }
  }
  ierr = PetscFree( bIndexSet ); CHKERRQ(ierr);

  /* PETSC_USE_POINTER: 'perm' borrows 'permute', which must stay alive until
     the IS is destroyed below */
  ierr = ISCreateGeneral(PETSC_COMM_SELF, nloc, permute, PETSC_USE_POINTER, &perm);
  CHKERRQ(ierr);
#if defined PETSC_GAMG_USE_LOG
  ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET4],0,0,0,0);CHKERRQ(ierr);
#endif
  /* run the coarsener (type taken from the options database) */
  ierr = MatCoarsenCreate( wcomm, &crs ); CHKERRQ(ierr);
  /* ierr = MatCoarsenSetType( crs, MATCOARSENMIS ); CHKERRQ(ierr); */
  ierr = MatCoarsenSetFromOptions( crs ); CHKERRQ(ierr);
  ierr = MatCoarsenSetGreedyOrdering( crs, perm ); CHKERRQ(ierr);
  ierr = MatCoarsenSetAdjacency( crs, Gmat2 ); CHKERRQ(ierr);
  ierr = MatCoarsenSetVerbose( crs, pc_gamg->verbose ); CHKERRQ(ierr);
  ierr = MatCoarsenSetStrictAggs( crs, PETSC_TRUE ); CHKERRQ(ierr);
  ierr = MatCoarsenApply( crs ); CHKERRQ(ierr);
  ierr = MatCoarsenGetData( crs, agg_lists ); CHKERRQ(ierr); /* output */
  ierr = MatCoarsenDestroy( &crs ); CHKERRQ(ierr);

  ierr = ISDestroy( &perm ); CHKERRQ(ierr);
  ierr = PetscFree( permute ); CHKERRQ(ierr);
#if defined PETSC_GAMG_USE_LOG
  ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET4],0,0,0,0);CHKERRQ(ierr);
#endif
  /* smooth aggs */
  if( Gmat2 != Gmat1 ) {
    /* graph was squared: fix up aggregates against the unsquared graph,
       then hand the squared graph back to the caller */
    const PetscCoarsenData *llist = *agg_lists;
    ierr = smoothAggs( Gmat2, Gmat1, *agg_lists ); CHKERRQ(ierr);
    ierr = MatDestroy( &Gmat1 ); CHKERRQ(ierr);
    *a_Gmat1 = Gmat2; /* output */
    ierr = PetscCDGetMat( llist, &mat ); CHKERRQ(ierr);
    /* a coarsener-provided auxiliary matrix is not expected in this path */
    if(mat) SETERRQ(wcomm,PETSC_ERR_ARG_WRONG, "Auxilary matrix with squared graph????");
  }
  else {
    const PetscCoarsenData *llist = *agg_lists;
    /* see if we have a matrix that takes precedence (returned from MatCoarsenApply) */
    ierr = PetscCDGetMat( llist, &mat ); CHKERRQ(ierr);
    if( mat ) {
      ierr = MatDestroy( &Gmat1 ); CHKERRQ(ierr);
      *a_Gmat1 = mat; /* output */
    }
  }
#if defined PETSC_USE_LOG
  ierr = PetscLogEventEnd(PC_GAMGCoarsen_AGG,0,0,0,0);CHKERRQ(ierr);
#endif
  PetscFunctionReturn(0);
}

/*
-------------------------------------------------------------------------- */ 1099 /* 1100 PCGAMGProlongator_AGG 1101 1102 Input Parameter: 1103 . pc - this 1104 . Amat - matrix on this fine level 1105 . Graph - used to get ghost data for nodes in 1106 . agg_lists - list of aggregates 1107 Output Parameter: 1108 . a_P_out - prolongation operator to the next level 1109 */ 1110 #undef __FUNCT__ 1111 #define __FUNCT__ "PCGAMGProlongator_AGG" 1112 PetscErrorCode PCGAMGProlongator_AGG( PC pc, 1113 const Mat Amat, 1114 const Mat Gmat, 1115 PetscCoarsenData *agg_lists, 1116 Mat *a_P_out 1117 ) 1118 { 1119 PC_MG *mg = (PC_MG*)pc->data; 1120 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 1121 const PetscInt verbose = pc_gamg->verbose; 1122 const PetscInt data_cols = pc_gamg->data_cell_cols; 1123 PetscErrorCode ierr; 1124 PetscInt Istart,Iend,nloc,ii,jj,kk,my0,nLocalSelected,bs; 1125 Mat Prol; 1126 PetscMPIInt mype, npe; 1127 MPI_Comm wcomm = ((PetscObject)Amat)->comm; 1128 const PetscInt col_bs = data_cols; 1129 PetscReal *data_w_ghost; 1130 PetscInt myCrs0, nbnodes=0, *flid_fgid; 1131 1132 PetscFunctionBegin; 1133 #if defined PETSC_USE_LOG 1134 ierr = PetscLogEventBegin(PC_GAMGProlongator_AGG,0,0,0,0);CHKERRQ(ierr); 1135 #endif 1136 ierr = MPI_Comm_rank( wcomm, &mype); CHKERRQ(ierr); 1137 ierr = MPI_Comm_size( wcomm, &npe); CHKERRQ(ierr); 1138 ierr = MatGetOwnershipRange( Amat, &Istart, &Iend ); CHKERRQ(ierr); 1139 ierr = MatGetBlockSize( Amat, &bs ); CHKERRQ( ierr ); 1140 nloc = (Iend-Istart)/bs; my0 = Istart/bs; assert((Iend-Istart)%bs==0); 1141 1142 /* get 'nLocalSelected' */ 1143 for( ii=0, nLocalSelected = 0 ; ii < nloc ; ii++ ){ 1144 PetscBool ise; 1145 /* filter out singletons 0 or 1? 
*/ 1146 ierr = PetscCDEmptyAt( agg_lists, ii, &ise ); CHKERRQ(ierr); 1147 if( !ise ) nLocalSelected++; 1148 } 1149 1150 /* create prolongator, create P matrix */ 1151 ierr = MatCreate( wcomm, &Prol ); CHKERRQ(ierr); 1152 ierr = MatSetSizes(Prol,nloc*bs,nLocalSelected*col_bs,PETSC_DETERMINE,PETSC_DETERMINE); 1153 CHKERRQ(ierr); 1154 ierr = MatSetBlockSizes( Prol, bs, col_bs ); CHKERRQ(ierr); 1155 ierr = MatSetType( Prol, MATAIJ ); CHKERRQ(ierr); 1156 ierr = MatSeqAIJSetPreallocation( Prol, data_cols, PETSC_NULL); CHKERRQ(ierr); 1157 ierr = MatMPIAIJSetPreallocation(Prol,data_cols, PETSC_NULL,data_cols, PETSC_NULL);CHKERRQ(ierr); 1158 /* nloc*bs, nLocalSelected*col_bs, */ 1159 /* PETSC_DETERMINE, PETSC_DETERMINE, */ 1160 /* data_cols, PETSC_NULL, data_cols, PETSC_NULL, */ 1161 /* &Prol ); */ 1162 1163 /* can get all points "removed" */ 1164 ierr = MatGetSize( Prol, &kk, &ii ); CHKERRQ(ierr); 1165 if( ii==0 ) { 1166 if( verbose ) { 1167 PetscPrintf(wcomm,"[%d]%s no selected points on coarse grid\n",mype,__FUNCT__); 1168 } 1169 ierr = MatDestroy( &Prol ); CHKERRQ(ierr); 1170 *a_P_out = PETSC_NULL; /* out */ 1171 PetscFunctionReturn(0); 1172 } 1173 if( verbose ) { 1174 PetscPrintf(wcomm,"\t\t[%d]%s New grid %d nodes\n",mype,__FUNCT__,ii/col_bs); 1175 } 1176 ierr = MatGetOwnershipRangeColumn( Prol, &myCrs0, &kk ); CHKERRQ(ierr); 1177 1178 assert((kk-myCrs0)%col_bs==0); 1179 myCrs0 = myCrs0/col_bs; 1180 assert((kk/col_bs-myCrs0)==nLocalSelected); 1181 1182 /* create global vector of data in 'data_w_ghost' */ 1183 #if defined PETSC_GAMG_USE_LOG 1184 ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET7],0,0,0,0);CHKERRQ(ierr); 1185 #endif 1186 if (npe > 1) { /* */ 1187 PetscReal *tmp_gdata,*tmp_ldata,*tp2; 1188 ierr = PetscMalloc( nloc*sizeof(PetscReal), &tmp_ldata ); CHKERRQ(ierr); 1189 for( jj = 0 ; jj < data_cols ; jj++ ){ 1190 for( kk = 0 ; kk < bs ; kk++) { 1191 PetscInt ii,stride; 1192 const PetscReal *tp = pc_gamg->data + jj*bs*nloc + kk; 1193 for( ii = 0 ; ii < 
nloc ; ii++, tp += bs ){ 1194 tmp_ldata[ii] = *tp; 1195 } 1196 ierr = PCGAMGGetDataWithGhosts( Gmat, 1, tmp_ldata, &stride, &tmp_gdata ); 1197 CHKERRQ(ierr); 1198 1199 if(jj==0 && kk==0) { /* now I know how many todal nodes - allocate */ 1200 ierr = PetscMalloc( stride*bs*data_cols*sizeof(PetscReal), &data_w_ghost ); CHKERRQ(ierr); 1201 nbnodes = bs*stride; 1202 } 1203 tp2 = data_w_ghost + jj*bs*stride + kk; 1204 for( ii = 0 ; ii < stride ; ii++, tp2 += bs ){ 1205 *tp2 = tmp_gdata[ii]; 1206 } 1207 ierr = PetscFree( tmp_gdata ); CHKERRQ(ierr); 1208 } 1209 } 1210 ierr = PetscFree( tmp_ldata ); CHKERRQ(ierr); 1211 } 1212 else { 1213 nbnodes = bs*nloc; 1214 data_w_ghost = (PetscReal*)pc_gamg->data; 1215 } 1216 1217 /* get P0 */ 1218 if( npe > 1 ){ 1219 PetscReal *fid_glid_loc,*fiddata; 1220 PetscInt stride; 1221 1222 ierr = PetscMalloc( nloc*sizeof(PetscReal), &fid_glid_loc ); CHKERRQ(ierr); 1223 for(kk=0;kk<nloc;kk++) fid_glid_loc[kk] = (PetscReal)(my0+kk); 1224 ierr = PCGAMGGetDataWithGhosts( Gmat, 1, fid_glid_loc, &stride, &fiddata ); 1225 CHKERRQ(ierr); 1226 ierr = PetscMalloc( stride*sizeof(PetscInt), &flid_fgid ); CHKERRQ(ierr); 1227 for(kk=0;kk<stride;kk++) flid_fgid[kk] = (PetscInt)fiddata[kk]; 1228 ierr = PetscFree( fiddata ); CHKERRQ(ierr); 1229 1230 assert(stride==nbnodes/bs); 1231 ierr = PetscFree( fid_glid_loc ); CHKERRQ(ierr); 1232 } 1233 else { 1234 ierr = PetscMalloc( nloc*sizeof(PetscInt), &flid_fgid ); CHKERRQ(ierr); 1235 for(kk=0;kk<nloc;kk++) flid_fgid[kk] = my0 + kk; 1236 } 1237 #if defined PETSC_GAMG_USE_LOG 1238 ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET7],0,0,0,0);CHKERRQ(ierr); 1239 ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET8],0,0,0,0);CHKERRQ(ierr); 1240 #endif 1241 { 1242 PetscReal *data_out = PETSC_NULL; 1243 ierr = formProl0( agg_lists, bs, data_cols, myCrs0, nbnodes, 1244 data_w_ghost, flid_fgid, &data_out, Prol ); 1245 CHKERRQ(ierr); 1246 ierr = PetscFree( pc_gamg->data ); CHKERRQ( ierr ); 1247 pc_gamg->data = 
data_out; 1248 pc_gamg->data_cell_rows = data_cols; 1249 pc_gamg->data_sz = data_cols*data_cols*nLocalSelected; 1250 } 1251 #if defined PETSC_GAMG_USE_LOG 1252 ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET8],0,0,0,0);CHKERRQ(ierr); 1253 #endif 1254 if (npe > 1) ierr = PetscFree( data_w_ghost ); CHKERRQ(ierr); 1255 ierr = PetscFree( flid_fgid ); CHKERRQ(ierr); 1256 1257 *a_P_out = Prol; /* out */ 1258 #if defined PETSC_USE_LOG 1259 ierr = PetscLogEventEnd(PC_GAMGProlongator_AGG,0,0,0,0);CHKERRQ(ierr); 1260 #endif 1261 PetscFunctionReturn(0); 1262 } 1263 1264 /* -------------------------------------------------------------------------- */ 1265 /* 1266 PCGAMGOptprol_AGG 1267 1268 Input Parameter: 1269 . pc - this 1270 . Amat - matrix on this fine level 1271 In/Output Parameter: 1272 . a_P_out - prolongation operator to the next level 1273 */ 1274 #undef __FUNCT__ 1275 #define __FUNCT__ "PCGAMGOptprol_AGG" 1276 PetscErrorCode PCGAMGOptprol_AGG( PC pc, 1277 const Mat Amat, 1278 Mat *a_P 1279 ) 1280 { 1281 PetscErrorCode ierr; 1282 PC_MG *mg = (PC_MG*)pc->data; 1283 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 1284 const PetscInt verbose = pc_gamg->verbose; 1285 PC_GAMG_AGG *pc_gamg_agg = (PC_GAMG_AGG*)pc_gamg->subctx; 1286 PetscInt jj; 1287 PetscMPIInt mype,npe; 1288 Mat Prol = *a_P; 1289 MPI_Comm wcomm = ((PetscObject)Amat)->comm; 1290 1291 PetscFunctionBegin; 1292 #if defined PETSC_USE_LOG 1293 ierr = PetscLogEventBegin(PC_GAMGOptprol_AGG,0,0,0,0);CHKERRQ(ierr); 1294 #endif 1295 ierr = MPI_Comm_rank( wcomm, &mype); CHKERRQ(ierr); 1296 ierr = MPI_Comm_size( wcomm, &npe); CHKERRQ(ierr); 1297 1298 /* smooth P0 */ 1299 for( jj = 0 ; jj < pc_gamg_agg->nsmooths ; jj++ ){ 1300 Mat tMat; 1301 Vec diag; 1302 PetscReal alpha, emax, emin; 1303 #if defined PETSC_GAMG_USE_LOG 1304 ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET9],0,0,0,0);CHKERRQ(ierr); 1305 #endif 1306 if( jj == 0 ) { 1307 KSP eksp; 1308 Vec bb, xx; 1309 PC pc; 1310 ierr = MatGetVecs( Amat, &bb, 0 ); 
CHKERRQ(ierr); 1311 ierr = MatGetVecs( Amat, &xx, 0 ); CHKERRQ(ierr); 1312 { 1313 PetscRandom rctx; 1314 ierr = PetscRandomCreate(wcomm,&rctx);CHKERRQ(ierr); 1315 ierr = PetscRandomSetFromOptions(rctx);CHKERRQ(ierr); 1316 ierr = VecSetRandom(bb,rctx);CHKERRQ(ierr); 1317 ierr = PetscRandomDestroy( &rctx ); CHKERRQ(ierr); 1318 } 1319 ierr = KSPCreate(wcomm,&eksp); CHKERRQ(ierr); 1320 ierr = KSPAppendOptionsPrefix( eksp, "gamg_est_"); CHKERRQ(ierr); 1321 ierr = KSPSetFromOptions( eksp ); CHKERRQ(ierr); 1322 ierr = KSPSetInitialGuessNonzero( eksp, PETSC_FALSE ); CHKERRQ(ierr); 1323 ierr = KSPSetOperators( eksp, Amat, Amat, SAME_NONZERO_PATTERN ); 1324 CHKERRQ( ierr ); 1325 ierr = KSPGetPC( eksp, &pc ); CHKERRQ( ierr ); 1326 ierr = PCSetType( pc, PCJACOBI ); CHKERRQ(ierr); /* smoother */ 1327 ierr = KSPSetTolerances(eksp,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,10); 1328 CHKERRQ(ierr); 1329 ierr = KSPSetNormType( eksp, KSP_NORM_NONE ); CHKERRQ(ierr); 1330 ierr = KSPSetComputeSingularValues( eksp,PETSC_TRUE ); CHKERRQ(ierr); 1331 1332 /* solve - keep stuff out of logging */ 1333 ierr = PetscLogEventDeactivate(KSP_Solve);CHKERRQ(ierr); 1334 ierr = PetscLogEventDeactivate(PC_Apply);CHKERRQ(ierr); 1335 ierr = KSPSolve( eksp, bb, xx ); CHKERRQ(ierr); 1336 ierr = PetscLogEventActivate(KSP_Solve);CHKERRQ(ierr); 1337 ierr = PetscLogEventActivate(PC_Apply);CHKERRQ(ierr); 1338 1339 ierr = KSPComputeExtremeSingularValues( eksp, &emax, &emin ); CHKERRQ(ierr); 1340 if( verbose ) { 1341 PetscPrintf(wcomm,"\t\t\t%s smooth P0: max eigen=%e min=%e PC=%s\n", 1342 __FUNCT__,emax,emin,PCJACOBI); 1343 } 1344 ierr = VecDestroy( &xx ); CHKERRQ(ierr); 1345 ierr = VecDestroy( &bb ); CHKERRQ(ierr); 1346 ierr = KSPDestroy( &eksp ); CHKERRQ(ierr); 1347 1348 if( pc_gamg->emax_id == -1 ) { 1349 ierr = PetscObjectComposedDataRegister( &pc_gamg->emax_id ); 1350 assert(pc_gamg->emax_id != -1 ); 1351 } 1352 ierr = PetscObjectComposedDataSetScalar( (PetscObject)Amat, pc_gamg->emax_id, emax ); 
CHKERRQ(ierr); 1353 } 1354 1355 /* smooth P1 := (I - omega/lam D^{-1}A)P0 */ 1356 ierr = MatMatMult( Amat, Prol, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &tMat ); CHKERRQ(ierr); 1357 ierr = MatGetVecs( Amat, &diag, 0 ); CHKERRQ(ierr); 1358 ierr = MatGetDiagonal( Amat, diag ); CHKERRQ(ierr); /* effectively PCJACOBI */ 1359 ierr = VecReciprocal( diag ); CHKERRQ(ierr); 1360 ierr = MatDiagonalScale( tMat, diag, 0 ); CHKERRQ(ierr); 1361 ierr = VecDestroy( &diag ); CHKERRQ(ierr); 1362 alpha = -1.5/emax; 1363 ierr = MatAYPX( tMat, alpha, Prol, SUBSET_NONZERO_PATTERN ); CHKERRQ(ierr); 1364 ierr = MatDestroy( &Prol ); CHKERRQ(ierr); 1365 Prol = tMat; 1366 #if defined PETSC_GAMG_USE_LOG 1367 ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET9],0,0,0,0);CHKERRQ(ierr); 1368 #endif 1369 } 1370 #if defined PETSC_USE_LOG 1371 ierr = PetscLogEventEnd(PC_GAMGOptprol_AGG,0,0,0,0);CHKERRQ(ierr); 1372 #endif 1373 *a_P = Prol; 1374 1375 PetscFunctionReturn(0); 1376 } 1377 1378 /* -------------------------------------------------------------------------- */ 1379 /* 1380 PCGAMGKKTProl_AGG 1381 1382 Input Parameter: 1383 . pc - this 1384 . Prol11 - matrix on this fine level 1385 . A21 - matrix on this fine level 1386 In/Output Parameter: 1387 . 
a_P22 - prolongation operator to the next level 1388 */ 1389 #undef __FUNCT__ 1390 #define __FUNCT__ "PCGAMGKKTProl_AGG" 1391 PetscErrorCode PCGAMGKKTProl_AGG( PC pc, 1392 const Mat Prol11, 1393 const Mat A21, 1394 Mat *a_P22 1395 ) 1396 { 1397 PetscErrorCode ierr; 1398 PC_MG *mg = (PC_MG*)pc->data; 1399 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 1400 const PetscInt verbose = pc_gamg->verbose; 1401 /* PC_GAMG_AGG *pc_gamg_agg = (PC_GAMG_AGG*)pc_gamg->subctx; */ 1402 PetscMPIInt mype,npe; 1403 Mat Prol22,Tmat,Gmat; 1404 MPI_Comm wcomm = ((PetscObject)pc)->comm; 1405 PetscCoarsenData *agg_lists; 1406 1407 PetscFunctionBegin; 1408 #if defined PETSC_USE_LOG 1409 ierr = PetscLogEventBegin(PC_GAMGKKTProl_AGG,0,0,0,0); CHKERRQ(ierr); 1410 #endif 1411 ierr = MPI_Comm_rank( wcomm, &mype); CHKERRQ(ierr); 1412 ierr = MPI_Comm_size( wcomm, &npe); CHKERRQ(ierr); 1413 1414 /* form C graph */ 1415 ierr = MatMatMult( A21, Prol11, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &Tmat); CHKERRQ(ierr); 1416 ierr = MatMatTransposeMult(Tmat, Tmat, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &Gmat ); CHKERRQ(ierr); 1417 ierr = MatDestroy(&Tmat); CHKERRQ(ierr); 1418 ierr = PCGAMGFilterGraph(&Gmat, 0.0, PETSC_FALSE, verbose); CHKERRQ(ierr); 1419 1420 /* coarsen constraints */ 1421 { 1422 MatCoarsen crs; 1423 ierr = MatCoarsenCreate( wcomm, &crs ); CHKERRQ(ierr); 1424 ierr = MatCoarsenSetType( crs, MATCOARSENMIS ); CHKERRQ(ierr); 1425 ierr = MatCoarsenSetAdjacency( crs, Gmat ); CHKERRQ(ierr); 1426 ierr = MatCoarsenSetVerbose( crs, verbose ); CHKERRQ(ierr); 1427 ierr = MatCoarsenSetStrictAggs( crs, PETSC_TRUE ); CHKERRQ(ierr); 1428 ierr = MatCoarsenApply( crs ); CHKERRQ(ierr); 1429 ierr = MatCoarsenGetData( crs, &agg_lists ); CHKERRQ(ierr); 1430 ierr = MatCoarsenDestroy( &crs ); CHKERRQ(ierr); 1431 } 1432 1433 /* form simple prolongation 'Prol22' */ 1434 { 1435 PetscInt ii,mm,clid,my0,nloc,nLocalSelected; 1436 PetscScalar val = 1.0; 1437 /* get 'nLocalSelected' */ 1438 ierr = MatGetLocalSize( Gmat, &nloc, &ii ); 
CHKERRQ(ierr); 1439 for( ii=0, nLocalSelected = 0 ; ii < nloc ; ii++ ){ 1440 PetscBool ise; 1441 /* filter out singletons 0 or 1? */ 1442 ierr = PetscCDEmptyAt( agg_lists, ii, &ise ); CHKERRQ(ierr); 1443 if( !ise ) nLocalSelected++; 1444 } 1445 1446 ierr = MatCreate(wcomm,&Prol22);CHKERRQ(ierr); 1447 ierr = MatSetSizes( Prol22,nloc, nLocalSelected, 1448 PETSC_DETERMINE, PETSC_DETERMINE); 1449 CHKERRQ(ierr); 1450 ierr = MatSetType( Prol22, MATAIJ ); CHKERRQ(ierr); 1451 ierr = MatSeqAIJSetPreallocation(Prol22,1,PETSC_NULL); CHKERRQ(ierr); 1452 ierr = MatMPIAIJSetPreallocation(Prol22,1,PETSC_NULL,1,PETSC_NULL); 1453 CHKERRQ(ierr); 1454 /* ierr = MatCreateAIJ( wcomm, */ 1455 /* nloc, nLocalSelected, */ 1456 /* PETSC_DETERMINE, PETSC_DETERMINE, */ 1457 /* 1, PETSC_NULL, 1, PETSC_NULL, */ 1458 /* &Prol22 ); */ 1459 1460 ierr = MatGetOwnershipRange( Prol22, &my0, &ii ); CHKERRQ(ierr); 1461 nloc = ii - my0; 1462 1463 /* make aggregates */ 1464 for( mm = clid = 0 ; mm < nloc ; mm++ ){ 1465 ierr = PetscCDSizeAt( agg_lists, mm, &ii ); CHKERRQ(ierr); 1466 if( ii > 0 ) { 1467 PetscInt asz=ii,cgid=my0+clid,rids[1000]; 1468 PetscCDPos pos; 1469 if(asz>1000)SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Very large aggregate: %d",asz); 1470 ii = 0; 1471 ierr = PetscCDGetHeadPos(agg_lists,mm,&pos); CHKERRQ(ierr); 1472 while(pos){ 1473 PetscInt gid1; 1474 ierr = PetscLLNGetID( pos, &gid1 ); CHKERRQ(ierr); 1475 ierr = PetscCDGetNextPos(agg_lists,mm,&pos); CHKERRQ(ierr); 1476 1477 rids[ii++] = gid1; 1478 } 1479 assert(ii==asz); 1480 /* add diagonal block of P0 */ 1481 ierr = MatSetValues(Prol22,asz,rids,1,&cgid,&val,INSERT_VALUES); CHKERRQ(ierr); 1482 1483 clid++; 1484 } /* coarse agg */ 1485 } /* for all fine nodes */ 1486 ierr = MatAssemblyBegin(Prol22,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr); 1487 ierr = MatAssemblyEnd(Prol22,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr); 1488 } 1489 1490 /* clean up */ 1491 ierr = MatDestroy( &Gmat ); CHKERRQ(ierr); 1492 ierr = PetscCDDestroy( agg_lists ); CHKERRQ(ierr); 
1493 #if defined PETSC_USE_LOG 1494 ierr = PetscLogEventEnd(PC_GAMGKKTProl_AGG,0,0,0,0);CHKERRQ(ierr); 1495 #endif 1496 *a_P22 = Prol22; 1497 1498 PetscFunctionReturn(0); 1499 } 1500 1501 /* -------------------------------------------------------------------------- */ 1502 /* 1503 PCCreateGAMG_AGG 1504 1505 Input Parameter: 1506 . pc - 1507 */ 1508 #undef __FUNCT__ 1509 #define __FUNCT__ "PCCreateGAMG_AGG" 1510 PetscErrorCode PCCreateGAMG_AGG( PC pc ) 1511 { 1512 PetscErrorCode ierr; 1513 PC_MG *mg = (PC_MG*)pc->data; 1514 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 1515 PC_GAMG_AGG *pc_gamg_agg; 1516 1517 PetscFunctionBegin; 1518 /* create sub context for SA */ 1519 ierr = PetscNewLog( pc, PC_GAMG_AGG, &pc_gamg_agg ); CHKERRQ(ierr); 1520 assert(!pc_gamg->subctx); 1521 pc_gamg->subctx = pc_gamg_agg; 1522 1523 pc->ops->setfromoptions = PCSetFromOptions_GAMG_AGG; 1524 pc->ops->destroy = PCDestroy_AGG; 1525 /* reset does not do anything; setup not virtual */ 1526 1527 /* set internal function pointers */ 1528 pc_gamg->graph = PCGAMGgraph_AGG; 1529 pc_gamg->coarsen = PCGAMGCoarsen_AGG; 1530 pc_gamg->prolongator = PCGAMGProlongator_AGG; 1531 pc_gamg->optprol = PCGAMGOptprol_AGG; 1532 pc_gamg->formkktprol = PCGAMGKKTProl_AGG; 1533 1534 pc_gamg->createdefaultdata = PCSetData_AGG; 1535 1536 ierr = PetscObjectComposeFunctionDynamic( (PetscObject)pc, 1537 "PCSetCoordinates_C", 1538 "PCSetCoordinates_AGG", 1539 PCSetCoordinates_AGG); 1540 PetscFunctionReturn(0); 1541 } 1542