1 /* 2 GAMG geometric-algebric multigrid PC - Mark Adams 2011 3 */ 4 #include <petsc/private/matimpl.h> 5 #include <../src/ksp/pc/impls/gamg/gamg.h> /*I "petscpc.h" I*/ 6 #include <../src/ksp/pc/impls/bjacobi/bjacobi.h> /* Hack to access same_local_solves */ 7 8 #if defined PETSC_GAMG_USE_LOG 9 PetscLogEvent petsc_gamg_setup_events[NUM_SET]; 10 #endif 11 12 #if defined PETSC_USE_LOG 13 PetscLogEvent PC_GAMGGraph_AGG; 14 PetscLogEvent PC_GAMGGraph_GEO; 15 PetscLogEvent PC_GAMGCoarsen_AGG; 16 PetscLogEvent PC_GAMGCoarsen_GEO; 17 PetscLogEvent PC_GAMGProlongator_AGG; 18 PetscLogEvent PC_GAMGProlongator_GEO; 19 PetscLogEvent PC_GAMGOptProlongator_AGG; 20 #endif 21 22 /* #define GAMG_STAGES */ 23 #if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES) 24 static PetscLogStage gamg_stages[PETSC_GAMG_MAXLEVELS]; 25 #endif 26 27 static PetscFunctionList GAMGList = 0; 28 static PetscBool PCGAMGPackageInitialized; 29 30 /* ----------------------------------------------------------------------------- */ 31 PetscErrorCode PCReset_GAMG(PC pc) 32 { 33 PetscErrorCode ierr; 34 PC_MG *mg = (PC_MG*)pc->data; 35 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 36 37 PetscFunctionBegin; 38 if (pc_gamg->data) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_PLIB,"This should not happen, cleaned up in SetUp\n"); 39 pc_gamg->data_sz = 0; 40 ierr = PetscFree(pc_gamg->orig_data);CHKERRQ(ierr); 41 PetscFunctionReturn(0); 42 } 43 44 /* -------------------------------------------------------------------------- */ 45 /* 46 PCGAMGCreateLevel_GAMG: create coarse op with RAP. repartition and/or reduce number 47 of active processors. 48 49 Input Parameter: 50 . pc - parameters + side effect: coarse data in 'pc_gamg->data' and 51 'pc_gamg->data_sz' are changed via repartitioning/reduction. 52 . Amat_fine - matrix on this fine (k) level 53 . cr_bs - coarse block size 54 In/Output Parameter: 55 . a_P_inout - prolongation operator to the next level (k-->k-1) 56 . a_nactive_proc - number of active procs 57 Output Parameter: 58 . a_Amat_crs - coarse matrix that is created (k-1) 59 */ 60 61 static PetscErrorCode PCGAMGCreateLevel_GAMG(PC pc,Mat Amat_fine,PetscInt cr_bs,Mat *a_P_inout,Mat *a_Amat_crs,PetscMPIInt *a_nactive_proc,IS * Pcolumnperm, PetscBool is_last) 62 { 63 PetscErrorCode ierr; 64 PC_MG *mg = (PC_MG*)pc->data; 65 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 66 Mat Cmat,Pold=*a_P_inout; 67 MPI_Comm comm; 68 PetscMPIInt rank,size,new_size,nactive=*a_nactive_proc; 69 PetscInt ncrs_eq,ncrs,f_bs; 70 71 PetscFunctionBegin; 72 ierr = PetscObjectGetComm((PetscObject)Amat_fine,&comm);CHKERRQ(ierr); 73 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 74 ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr); 75 ierr = MatGetBlockSize(Amat_fine, &f_bs);CHKERRQ(ierr); 76 ierr = MatPtAP(Amat_fine, Pold, MAT_INITIAL_MATRIX, 2.0, &Cmat);CHKERRQ(ierr); 77 78 /* set 'ncrs' (nodes), 'ncrs_eq' (equations)*/ 79 ierr = MatGetLocalSize(Cmat, &ncrs_eq, NULL);CHKERRQ(ierr); 80 if (pc_gamg->data_cell_rows>0) { 81 ncrs = pc_gamg->data_sz/pc_gamg->data_cell_cols/pc_gamg->data_cell_rows; 82 } else { 83 PetscInt bs; 84 ierr = MatGetBlockSize(Cmat, &bs);CHKERRQ(ierr); 85 ncrs = ncrs_eq/bs; 86 } 87 88 /* get number of PEs to make active 'new_size', reduce, can be any integer 1-P */ 89 if (is_last && !pc_gamg->use_parallel_coarse_grid_solver) new_size = 1; 90 else { 91 PetscInt ncrs_eq_glob; 92 ierr = MatGetSize(Cmat, &ncrs_eq_glob, NULL);CHKERRQ(ierr); 93 new_size = (PetscMPIInt)((float)ncrs_eq_glob/(float)pc_gamg->min_eq_proc + 0.5); /* hardwire min. number of eq/proc */ 94 if (!new_size) new_size = 1; /* not likely, posible? */ 95 else if (new_size >= nactive) new_size = nactive; /* no change, rare */ 96 } 97 98 if (Pcolumnperm) *Pcolumnperm = NULL; 99 100 if (!pc_gamg->repart && new_size==nactive) *a_Amat_crs = Cmat; /* output - no repartitioning or reduction - could bail here */ 101 else { 102 PetscInt *counts,*newproc_idx,ii,jj,kk,strideNew,*tidx,ncrs_new,ncrs_eq_new,nloc_old; 103 IS is_eq_newproc,is_eq_num,is_eq_num_prim,new_eq_indices; 104 105 nloc_old = ncrs_eq/cr_bs; 106 if (ncrs_eq % cr_bs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"ncrs_eq %D not divisible by cr_bs %D",ncrs_eq,cr_bs); 107 #if defined PETSC_GAMG_USE_LOG 108 ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET12],0,0,0,0);CHKERRQ(ierr); 109 #endif 110 /* make 'is_eq_newproc' */ 111 ierr = PetscMalloc1(size, &counts);CHKERRQ(ierr); 112 if (pc_gamg->repart) { 113 /* Repartition Cmat_{k} and move colums of P^{k}_{k-1} and coordinates of primal part accordingly */ 114 Mat adj; 115 116 ierr = PetscInfo3(pc,"Repartition: size (active): %D --> %D, %D local equations\n",*a_nactive_proc,new_size,ncrs_eq);CHKERRQ(ierr); 117 118 /* get 'adj' */ 119 if (cr_bs == 1) { 120 ierr = MatConvert(Cmat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj);CHKERRQ(ierr); 121 } else { 122 /* make a scalar matrix to partition (no Stokes here) */ 123 Mat tMat; 124 PetscInt Istart_crs,Iend_crs,ncols,jj,Ii; 125 const PetscScalar *vals; 126 const PetscInt *idx; 127 PetscInt *d_nnz, *o_nnz, M, N; 128 static PetscInt llev = 0; 129 MatType mtype; 130 131 ierr = PetscMalloc2(ncrs, &d_nnz,ncrs, &o_nnz);CHKERRQ(ierr); 132 ierr = MatGetOwnershipRange(Cmat, &Istart_crs, &Iend_crs);CHKERRQ(ierr); 133 ierr = MatGetSize(Cmat, &M, &N);CHKERRQ(ierr); 134 for (Ii = Istart_crs, jj = 0; Ii < Iend_crs; Ii += cr_bs, jj++) { 135 ierr = MatGetRow(Cmat,Ii,&ncols,0,0);CHKERRQ(ierr); 136 d_nnz[jj] = ncols/cr_bs; 137 o_nnz[jj] = ncols/cr_bs; 138 ierr = MatRestoreRow(Cmat,Ii,&ncols,0,0);CHKERRQ(ierr); 139 if (d_nnz[jj] > ncrs) d_nnz[jj] = ncrs; 140 if (o_nnz[jj] > (M/cr_bs-ncrs)) o_nnz[jj] = M/cr_bs-ncrs; 141 } 142 143 ierr = MatGetType(Amat_fine,&mtype);CHKERRQ(ierr); 144 ierr = MatCreate(comm, &tMat);CHKERRQ(ierr); 145 ierr = MatSetSizes(tMat, ncrs, ncrs,PETSC_DETERMINE, PETSC_DETERMINE);CHKERRQ(ierr); 146 ierr = MatSetType(tMat,mtype);CHKERRQ(ierr); 147 ierr = MatSeqAIJSetPreallocation(tMat,0,d_nnz);CHKERRQ(ierr); 148 ierr = MatMPIAIJSetPreallocation(tMat,0,d_nnz,0,o_nnz);CHKERRQ(ierr); 149 ierr = PetscFree2(d_nnz,o_nnz);CHKERRQ(ierr); 150 151 for (ii = Istart_crs; ii < Iend_crs; ii++) { 152 PetscInt dest_row = ii/cr_bs; 153 ierr = MatGetRow(Cmat,ii,&ncols,&idx,&vals);CHKERRQ(ierr); 154 for (jj = 0; jj < ncols; jj++) { 155 PetscInt dest_col = idx[jj]/cr_bs; 156 PetscScalar v = 1.0; 157 ierr = MatSetValues(tMat,1,&dest_row,1,&dest_col,&v,ADD_VALUES);CHKERRQ(ierr); 158 } 159 ierr = MatRestoreRow(Cmat,ii,&ncols,&idx,&vals);CHKERRQ(ierr); 160 } 161 ierr = MatAssemblyBegin(tMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 162 ierr = MatAssemblyEnd(tMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 163 164 if (llev++ == -1) { 165 PetscViewer viewer; char fname[32]; 166 ierr = PetscSNPrintf(fname,sizeof(fname),"part_mat_%D.mat",llev);CHKERRQ(ierr); 167 PetscViewerBinaryOpen(comm,fname,FILE_MODE_WRITE,&viewer); 168 ierr = MatView(tMat, viewer);CHKERRQ(ierr); 169 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr); 170 } 171 ierr = MatConvert(tMat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj);CHKERRQ(ierr); 172 ierr = MatDestroy(&tMat);CHKERRQ(ierr); 173 } /* create 'adj' */ 174 175 { /* partition: get newproc_idx */ 176 char prefix[256]; 177 const char *pcpre; 178 const PetscInt *is_idx; 179 MatPartitioning mpart; 180 IS proc_is; 181 PetscInt targetPE; 182 183 ierr = MatPartitioningCreate(comm, &mpart);CHKERRQ(ierr); 184 ierr = MatPartitioningSetAdjacency(mpart, adj);CHKERRQ(ierr); 185 ierr = PCGetOptionsPrefix(pc, &pcpre);CHKERRQ(ierr); 186 ierr = PetscSNPrintf(prefix,sizeof(prefix),"%spc_gamg_",pcpre ? pcpre : "");CHKERRQ(ierr); 187 ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr); 188 ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr); 189 ierr = MatPartitioningSetNParts(mpart, new_size);CHKERRQ(ierr); 190 ierr = MatPartitioningApply(mpart, &proc_is);CHKERRQ(ierr); 191 ierr = MatPartitioningDestroy(&mpart);CHKERRQ(ierr); 192 193 /* collect IS info */ 194 ierr = PetscMalloc1(ncrs_eq, &newproc_idx);CHKERRQ(ierr); 195 ierr = ISGetIndices(proc_is, &is_idx);CHKERRQ(ierr); 196 targetPE = 1; /* bring to "front" of machine */ 197 /*targetPE = size/new_size;*/ /* spread partitioning across machine */ 198 for (kk = jj = 0 ; kk < nloc_old ; kk++) { 199 for (ii = 0 ; ii < cr_bs ; ii++, jj++) { 200 newproc_idx[jj] = is_idx[kk] * targetPE; /* distribution */ 201 } 202 } 203 ierr = ISRestoreIndices(proc_is, &is_idx);CHKERRQ(ierr); 204 ierr = ISDestroy(&proc_is);CHKERRQ(ierr); 205 } 206 ierr = MatDestroy(&adj);CHKERRQ(ierr); 207 208 ierr = ISCreateGeneral(comm, ncrs_eq, newproc_idx, PETSC_COPY_VALUES, &is_eq_newproc);CHKERRQ(ierr); 209 ierr = PetscFree(newproc_idx);CHKERRQ(ierr); 210 } else { /* simple aggreagtion of parts -- 'is_eq_newproc' */ 211 PetscInt rfactor,targetPE; 212 213 /* find factor */ 214 if (new_size == 1) rfactor = size; /* easy */ 215 else { 216 PetscReal best_fact = 0.; 217 jj = -1; 218 for (kk = 1 ; kk <= size ; kk++) { 219 if (!(size%kk)) { /* a candidate */ 220 PetscReal nactpe = (PetscReal)size/(PetscReal)kk, fact = nactpe/(PetscReal)new_size; 221 if (fact > 1.0) fact = 1./fact; /* keep fact < 1 */ 222 if (fact > best_fact) { 223 best_fact = fact; jj = kk; 224 } 225 } 226 } 227 if (jj != -1) rfactor = jj; 228 else rfactor = 1; /* does this happen .. a prime */ 229 } 230 new_size = size/rfactor; 231 232 if (new_size==nactive) { 233 *a_Amat_crs = Cmat; /* output - no repartitioning or reduction, bail out because nested here */ 234 ierr = PetscFree(counts);CHKERRQ(ierr); 235 ierr = PetscInfo2(pc,"Aggregate processors noop: new_size=%D, neq(loc)=%D\n",new_size,ncrs_eq);CHKERRQ(ierr); 236 #if defined PETSC_GAMG_USE_LOG 237 ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET12],0,0,0,0);CHKERRQ(ierr); 238 #endif 239 PetscFunctionReturn(0); 240 } 241 242 ierr = PetscInfo1(pc,"Number of equations (loc) %D with simple aggregation\n",ncrs_eq);CHKERRQ(ierr); 243 targetPE = rank/rfactor; 244 ierr = ISCreateStride(comm, ncrs_eq, targetPE, 0, &is_eq_newproc);CHKERRQ(ierr); 245 } /* end simple 'is_eq_newproc' */ 246 247 /* 248 Create an index set from the is_eq_newproc index set to indicate the mapping TO 249 */ 250 ierr = ISPartitioningToNumbering(is_eq_newproc, &is_eq_num);CHKERRQ(ierr); 251 is_eq_num_prim = is_eq_num; 252 /* 253 Determine how many equations/vertices are assigned to each processor 254 */ 255 ierr = ISPartitioningCount(is_eq_newproc, size, counts);CHKERRQ(ierr); 256 ncrs_eq_new = counts[rank]; 257 ierr = ISDestroy(&is_eq_newproc);CHKERRQ(ierr); 258 ncrs_new = ncrs_eq_new/cr_bs; /* eqs */ 259 260 ierr = PetscFree(counts);CHKERRQ(ierr); 261 #if defined PETSC_GAMG_USE_LOG 262 ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET12],0,0,0,0);CHKERRQ(ierr); 263 #endif 264 /* data movement scope -- this could be moved to subclasses so that we don't try to cram all auxilary data into some complex abstracted thing */ 265 { 266 Vec src_crd, dest_crd; 267 const PetscInt *idx,ndata_rows=pc_gamg->data_cell_rows,ndata_cols=pc_gamg->data_cell_cols,node_data_sz=ndata_rows*ndata_cols; 268 VecScatter vecscat; 269 PetscScalar *array; 270 IS isscat; 271 272 /* move data (for primal equations only) */ 273 /* Create a vector to contain the newly ordered element information */ 274 ierr = VecCreate(comm, &dest_crd);CHKERRQ(ierr); 275 ierr = VecSetSizes(dest_crd, node_data_sz*ncrs_new, PETSC_DECIDE);CHKERRQ(ierr); 276 ierr = VecSetType(dest_crd,VECSTANDARD);CHKERRQ(ierr); /* this is needed! */ 277 /* 278 There are 'ndata_rows*ndata_cols' data items per node, (one can think of the vectors of having 279 a block size of ...). Note, ISs are expanded into equation space by 'cr_bs'. 280 */ 281 ierr = PetscMalloc1(ncrs*node_data_sz, &tidx);CHKERRQ(ierr); 282 ierr = ISGetIndices(is_eq_num_prim, &idx);CHKERRQ(ierr); 283 for (ii=0,jj=0; ii<ncrs; ii++) { 284 PetscInt id = idx[ii*cr_bs]/cr_bs; /* get node back */ 285 for (kk=0; kk<node_data_sz; kk++, jj++) tidx[jj] = id*node_data_sz + kk; 286 } 287 ierr = ISRestoreIndices(is_eq_num_prim, &idx);CHKERRQ(ierr); 288 ierr = ISCreateGeneral(comm, node_data_sz*ncrs, tidx, PETSC_COPY_VALUES, &isscat);CHKERRQ(ierr); 289 ierr = PetscFree(tidx);CHKERRQ(ierr); 290 /* 291 Create a vector to contain the original vertex information for each element 292 */ 293 ierr = VecCreateSeq(PETSC_COMM_SELF, node_data_sz*ncrs, &src_crd);CHKERRQ(ierr); 294 for (jj=0; jj<ndata_cols; jj++) { 295 const PetscInt stride0=ncrs*pc_gamg->data_cell_rows; 296 for (ii=0; ii<ncrs; ii++) { 297 for (kk=0; kk<ndata_rows; kk++) { 298 PetscInt ix = ii*ndata_rows + kk + jj*stride0, jx = ii*node_data_sz + kk*ndata_cols + jj; 299 PetscScalar tt = (PetscScalar)pc_gamg->data[ix]; 300 ierr = VecSetValues(src_crd, 1, &jx, &tt, INSERT_VALUES);CHKERRQ(ierr); 301 } 302 } 303 } 304 ierr = VecAssemblyBegin(src_crd);CHKERRQ(ierr); 305 ierr = VecAssemblyEnd(src_crd);CHKERRQ(ierr); 306 /* 307 Scatter the element vertex information (still in the original vertex ordering) 308 to the correct processor 309 */ 310 ierr = VecScatterCreate(src_crd, NULL, dest_crd, isscat, &vecscat);CHKERRQ(ierr); 311 ierr = ISDestroy(&isscat);CHKERRQ(ierr); 312 ierr = VecScatterBegin(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 313 ierr = VecScatterEnd(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 314 ierr = VecScatterDestroy(&vecscat);CHKERRQ(ierr); 315 ierr = VecDestroy(&src_crd);CHKERRQ(ierr); 316 /* 317 Put the element vertex data into a new allocation of the gdata->ele 318 */ 319 ierr = PetscFree(pc_gamg->data);CHKERRQ(ierr); 320 ierr = PetscMalloc1(node_data_sz*ncrs_new, &pc_gamg->data);CHKERRQ(ierr); 321 322 pc_gamg->data_sz = node_data_sz*ncrs_new; 323 strideNew = ncrs_new*ndata_rows; 324 325 ierr = VecGetArray(dest_crd, &array);CHKERRQ(ierr); 326 for (jj=0; jj<ndata_cols; jj++) { 327 for (ii=0; ii<ncrs_new; ii++) { 328 for (kk=0; kk<ndata_rows; kk++) { 329 PetscInt ix = ii*ndata_rows + kk + jj*strideNew, jx = ii*node_data_sz + kk*ndata_cols + jj; 330 pc_gamg->data[ix] = PetscRealPart(array[jx]); 331 } 332 } 333 } 334 ierr = VecRestoreArray(dest_crd, &array);CHKERRQ(ierr); 335 ierr = VecDestroy(&dest_crd);CHKERRQ(ierr); 336 } 337 /* move A and P (columns) with new layout */ 338 #if defined PETSC_GAMG_USE_LOG 339 ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET13],0,0,0,0);CHKERRQ(ierr); 340 #endif 341 342 /* 343 Invert for MatCreateSubMatrix 344 */ 345 ierr = ISInvertPermutation(is_eq_num, ncrs_eq_new, &new_eq_indices);CHKERRQ(ierr); 346 ierr = ISSort(new_eq_indices);CHKERRQ(ierr); /* is this needed? */ 347 ierr = ISSetBlockSize(new_eq_indices, cr_bs);CHKERRQ(ierr); 348 if (is_eq_num != is_eq_num_prim) { 349 ierr = ISDestroy(&is_eq_num_prim);CHKERRQ(ierr); /* could be same as 'is_eq_num' */ 350 } 351 if (Pcolumnperm) { 352 ierr = PetscObjectReference((PetscObject)new_eq_indices);CHKERRQ(ierr); 353 *Pcolumnperm = new_eq_indices; 354 } 355 ierr = ISDestroy(&is_eq_num);CHKERRQ(ierr); 356 #if defined PETSC_GAMG_USE_LOG 357 ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET13],0,0,0,0);CHKERRQ(ierr); 358 ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET14],0,0,0,0);CHKERRQ(ierr); 359 #endif 360 /* 'a_Amat_crs' output */ 361 { 362 Mat mat; 363 ierr = MatCreateSubMatrix(Cmat, new_eq_indices, new_eq_indices, MAT_INITIAL_MATRIX, &mat);CHKERRQ(ierr); 364 *a_Amat_crs = mat; 365 } 366 ierr = MatDestroy(&Cmat);CHKERRQ(ierr); 367 368 #if defined PETSC_GAMG_USE_LOG 369 ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET14],0,0,0,0);CHKERRQ(ierr); 370 #endif 371 /* prolongator */ 372 { 373 IS findices; 374 PetscInt Istart,Iend; 375 Mat Pnew; 376 377 ierr = MatGetOwnershipRange(Pold, &Istart, &Iend);CHKERRQ(ierr); 378 #if defined PETSC_GAMG_USE_LOG 379 ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET15],0,0,0,0);CHKERRQ(ierr); 380 #endif 381 ierr = ISCreateStride(comm,Iend-Istart,Istart,1,&findices);CHKERRQ(ierr); 382 ierr = ISSetBlockSize(findices,f_bs);CHKERRQ(ierr); 383 ierr = MatCreateSubMatrix(Pold, findices, new_eq_indices, MAT_INITIAL_MATRIX, &Pnew);CHKERRQ(ierr); 384 ierr = ISDestroy(&findices);CHKERRQ(ierr); 385 386 #if defined PETSC_GAMG_USE_LOG 387 ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET15],0,0,0,0);CHKERRQ(ierr); 388 #endif 389 ierr = MatDestroy(a_P_inout);CHKERRQ(ierr); 390 391 /* output - repartitioned */ 392 *a_P_inout = Pnew; 393 } 394 ierr = ISDestroy(&new_eq_indices);CHKERRQ(ierr); 395 396 *a_nactive_proc = new_size; /* output */ 397 } 398 PetscFunctionReturn(0); 399 } 400 401 /* -------------------------------------------------------------------------- */ 402 /* 403 PCSetUp_GAMG - Prepares for the use of the GAMG preconditioner 404 by setting data structures and options. 405 406 Input Parameter: 407 . pc - the preconditioner context 408 409 */ 410 PetscErrorCode PCSetUp_GAMG(PC pc) 411 { 412 PetscErrorCode ierr; 413 PC_MG *mg = (PC_MG*)pc->data; 414 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 415 Mat Pmat = pc->pmat; 416 PetscInt fine_level,level,level1,bs,M,N,qq,lidx,nASMBlocksArr[PETSC_GAMG_MAXLEVELS]; 417 MPI_Comm comm; 418 PetscMPIInt rank,size,nactivepe; 419 Mat Aarr[PETSC_GAMG_MAXLEVELS],Parr[PETSC_GAMG_MAXLEVELS]; 420 IS *ASMLocalIDsArr[PETSC_GAMG_MAXLEVELS]; 421 PetscLogDouble nnz0=0.,nnztot=0.; 422 MatInfo info; 423 PetscBool is_last = PETSC_FALSE; 424 425 PetscFunctionBegin; 426 ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr); 427 ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); 428 ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 429 430 if (pc_gamg->setup_count++ > 0) { 431 if ((PetscBool)(!pc_gamg->reuse_prol)) { 432 /* reset everything */ 433 ierr = PCReset_MG(pc);CHKERRQ(ierr); 434 pc->setupcalled = 0; 435 } else { 436 PC_MG_Levels **mglevels = mg->levels; 437 /* just do Galerkin grids */ 438 Mat B,dA,dB; 439 440 if (!pc->setupcalled) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"PCSetUp() has not been called yet"); 441 if (pc_gamg->Nlevels > 1) { 442 /* currently only handle case where mat and pmat are the same on coarser levels */ 443 ierr = KSPGetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,&dA,&dB);CHKERRQ(ierr); 444 /* (re)set to get dirty flag */ 445 ierr = KSPSetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,dA,dB);CHKERRQ(ierr); 446 447 for (level=pc_gamg->Nlevels-2; level>=0; level--) { 448 /* the first time through the matrix structure has changed from repartitioning */ 449 if (pc_gamg->setup_count==2) { 450 ierr = MatPtAP(dB,mglevels[level+1]->interpolate,MAT_INITIAL_MATRIX,1.0,&B);CHKERRQ(ierr); 451 ierr = MatDestroy(&mglevels[level]->A);CHKERRQ(ierr); 452 453 mglevels[level]->A = B; 454 } else { 455 ierr = KSPGetOperators(mglevels[level]->smoothd,NULL,&B);CHKERRQ(ierr); 456 ierr = MatPtAP(dB,mglevels[level+1]->interpolate,MAT_REUSE_MATRIX,1.0,&B);CHKERRQ(ierr); 457 } 458 ierr = KSPSetOperators(mglevels[level]->smoothd,B,B);CHKERRQ(ierr); 459 dB = B; 460 } 461 } 462 463 ierr = PCSetUp_MG(pc);CHKERRQ(ierr); 464 PetscFunctionReturn(0); 465 } 466 } 467 468 if (!pc_gamg->data) { 469 if (pc_gamg->orig_data) { 470 ierr = MatGetBlockSize(Pmat, &bs);CHKERRQ(ierr); 471 ierr = MatGetLocalSize(Pmat, &qq, NULL);CHKERRQ(ierr); 472 473 pc_gamg->data_sz = (qq/bs)*pc_gamg->orig_data_cell_rows*pc_gamg->orig_data_cell_cols; 474 pc_gamg->data_cell_rows = pc_gamg->orig_data_cell_rows; 475 pc_gamg->data_cell_cols = pc_gamg->orig_data_cell_cols; 476 477 ierr = PetscMalloc1(pc_gamg->data_sz, &pc_gamg->data);CHKERRQ(ierr); 478 for (qq=0; qq<pc_gamg->data_sz; qq++) pc_gamg->data[qq] = pc_gamg->orig_data[qq]; 479 } else { 480 if (!pc_gamg->ops->createdefaultdata) SETERRQ(comm,PETSC_ERR_PLIB,"'createdefaultdata' not set(?) need to support NULL data"); 481 ierr = pc_gamg->ops->createdefaultdata(pc,Pmat);CHKERRQ(ierr); 482 } 483 } 484 485 /* cache original data for reuse */ 486 if (!pc_gamg->orig_data && (PetscBool)(!pc_gamg->reuse_prol)) { 487 ierr = PetscMalloc1(pc_gamg->data_sz, &pc_gamg->orig_data);CHKERRQ(ierr); 488 for (qq=0; qq<pc_gamg->data_sz; qq++) pc_gamg->orig_data[qq] = pc_gamg->data[qq]; 489 pc_gamg->orig_data_cell_rows = pc_gamg->data_cell_rows; 490 pc_gamg->orig_data_cell_cols = pc_gamg->data_cell_cols; 491 } 492 493 /* get basic dims */ 494 ierr = MatGetBlockSize(Pmat, &bs);CHKERRQ(ierr); 495 ierr = MatGetSize(Pmat, &M, &N);CHKERRQ(ierr); 496 497 ierr = MatGetInfo(Pmat,MAT_GLOBAL_SUM,&info);CHKERRQ(ierr); /* global reduction */ 498 nnz0 = info.nz_used; 499 nnztot = info.nz_used; 500 ierr = PetscInfo6(pc,"level %d) N=%D, n data rows=%d, n data cols=%d, nnz/row (ave)=%d, np=%d\n",0,M,pc_gamg->data_cell_rows,pc_gamg->data_cell_cols,(int)(nnz0/(PetscReal)M+0.5),size);CHKERRQ(ierr); 501 502 /* Get A_i and R_i */ 503 for (level=0, Aarr[0]=Pmat, nactivepe = size; level < (pc_gamg->Nlevels-1) && (!level || M>pc_gamg->coarse_eq_limit); level++) { 504 pc_gamg->current_level = level; 505 level1 = level + 1; 506 #if defined PETSC_GAMG_USE_LOG 507 ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET1],0,0,0,0);CHKERRQ(ierr); 508 #if (defined GAMG_STAGES) 509 ierr = PetscLogStagePush(gamg_stages[level]);CHKERRQ(ierr); 510 #endif 511 #endif 512 { /* construct prolongator */ 513 Mat Gmat; 514 PetscCoarsenData *agg_lists; 515 Mat Prol11; 516 517 ierr = pc_gamg->ops->graph(pc,Aarr[level], &Gmat);CHKERRQ(ierr); 518 ierr = pc_gamg->ops->coarsen(pc, &Gmat, &agg_lists);CHKERRQ(ierr); 519 ierr = pc_gamg->ops->prolongator(pc,Aarr[level],Gmat,agg_lists,&Prol11);CHKERRQ(ierr); 520 521 /* could have failed to create new level */ 522 if (Prol11) { 523 /* get new block size of coarse matrices */ 524 ierr = MatGetBlockSizes(Prol11, NULL, &bs);CHKERRQ(ierr); 525 526 if (pc_gamg->ops->optprolongator) { 527 /* smooth */ 528 ierr = pc_gamg->ops->optprolongator(pc, Aarr[level], &Prol11);CHKERRQ(ierr); 529 } 530 531 Parr[level1] = Prol11; 532 } else Parr[level1] = NULL; /* failed to coarsen */ 533 534 if (pc_gamg->use_aggs_in_asm) { 535 PetscInt bs; 536 ierr = MatGetBlockSizes(Prol11, &bs, NULL);CHKERRQ(ierr); 537 ierr = PetscCDGetASMBlocks(agg_lists, bs, Gmat, &nASMBlocksArr[level], &ASMLocalIDsArr[level]);CHKERRQ(ierr); 538 } 539 540 ierr = MatDestroy(&Gmat);CHKERRQ(ierr); 541 ierr = PetscCDDestroy(agg_lists);CHKERRQ(ierr); 542 } /* construct prolongator scope */ 543 #if defined PETSC_GAMG_USE_LOG 544 ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET1],0,0,0,0);CHKERRQ(ierr); 545 #endif 546 if (!level) Aarr[0] = Pmat; /* use Pmat for finest level setup */ 547 if (!Parr[level1]) { /* failed to coarsen */ 548 ierr = PetscInfo1(pc,"Stop gridding, level %D\n",level);CHKERRQ(ierr); 549 #if defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES 550 ierr = PetscLogStagePop();CHKERRQ(ierr); 551 #endif 552 break; 553 } 554 #if defined PETSC_GAMG_USE_LOG 555 ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET2],0,0,0,0);CHKERRQ(ierr); 556 #endif 557 ierr = MatGetSize(Parr[level1], &M, &N);CHKERRQ(ierr); /* N is next M, a loop test variables */ 558 if (is_last) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Is last ????????"); 559 if (N <= pc_gamg->coarse_eq_limit) is_last = PETSC_TRUE; 560 if (level1 == pc_gamg->Nlevels-1) is_last = PETSC_TRUE; 561 ierr = pc_gamg->ops->createlevel(pc, Aarr[level], bs, &Parr[level1], &Aarr[level1], &nactivepe, NULL, is_last);CHKERRQ(ierr); 562 563 #if defined PETSC_GAMG_USE_LOG 564 ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET2],0,0,0,0);CHKERRQ(ierr); 565 #endif 566 ierr = MatGetSize(Aarr[level1], &M, &N);CHKERRQ(ierr); /* M is loop test variables */ 567 ierr = MatGetInfo(Aarr[level1], MAT_GLOBAL_SUM, &info);CHKERRQ(ierr); 568 nnztot += info.nz_used; 569 ierr = PetscInfo5(pc,"%d) N=%D, n data cols=%d, nnz/row (ave)=%d, %d active pes\n",level1,M,pc_gamg->data_cell_cols,(int)(info.nz_used/(PetscReal)M),nactivepe);CHKERRQ(ierr); 570 571 #if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES) 572 ierr = PetscLogStagePop();CHKERRQ(ierr); 573 #endif 574 /* stop if one node or one proc -- could pull back for singular problems */ 575 if ( (pc_gamg->data_cell_cols && M/pc_gamg->data_cell_cols < 2) || (!pc_gamg->data_cell_cols && M/bs < 2) ) { 576 ierr = PetscInfo2(pc,"HARD stop of coarsening on level %D. Grid too small: %D block nodes\n",level,M/bs);CHKERRQ(ierr); 577 level++; 578 break; 579 } 580 } /* levels */ 581 ierr = PetscFree(pc_gamg->data);CHKERRQ(ierr); 582 583 ierr = PetscInfo2(pc,"%D levels, grid complexity = %g\n",level+1,nnztot/nnz0);CHKERRQ(ierr); 584 pc_gamg->Nlevels = level + 1; 585 fine_level = level; 586 ierr = PCMGSetLevels(pc,pc_gamg->Nlevels,NULL);CHKERRQ(ierr); 587 588 if (pc_gamg->Nlevels > 1) { /* don't setup MG if one level */ 589 /* set default smoothers & set operators */ 590 for (lidx = 1, level = pc_gamg->Nlevels-2; lidx <= fine_level; lidx++, level--) { 591 KSP smoother; 592 PC subpc; 593 594 ierr = PCMGGetSmoother(pc, lidx, &smoother);CHKERRQ(ierr); 595 ierr = KSPGetPC(smoother, &subpc);CHKERRQ(ierr); 596 597 ierr = KSPSetNormType(smoother, KSP_NORM_NONE);CHKERRQ(ierr); 598 /* set ops */ 599 ierr = KSPSetOperators(smoother, Aarr[level], Aarr[level]);CHKERRQ(ierr); 600 ierr = PCMGSetInterpolation(pc, lidx, Parr[level+1]);CHKERRQ(ierr); 601 602 /* set defaults */ 603 ierr = KSPSetType(smoother, KSPCHEBYSHEV);CHKERRQ(ierr); 604 605 /* set blocks for ASM smoother that uses the 'aggregates' */ 606 if (pc_gamg->use_aggs_in_asm) { 607 PetscInt sz; 608 IS *iss; 609 610 sz = nASMBlocksArr[level]; 611 iss = ASMLocalIDsArr[level]; 612 ierr = PCSetType(subpc, PCASM);CHKERRQ(ierr); 613 ierr = PCASMSetOverlap(subpc, 0);CHKERRQ(ierr); 614 ierr = PCASMSetType(subpc,PC_ASM_BASIC);CHKERRQ(ierr); 615 if (!sz) { 616 IS is; 617 ierr = ISCreateGeneral(PETSC_COMM_SELF, 0, NULL, PETSC_COPY_VALUES, &is);CHKERRQ(ierr); 618 ierr = PCASMSetLocalSubdomains(subpc, 1, NULL, &is);CHKERRQ(ierr); 619 ierr = ISDestroy(&is);CHKERRQ(ierr); 620 } else { 621 PetscInt kk; 622 ierr = PCASMSetLocalSubdomains(subpc, sz, NULL, iss);CHKERRQ(ierr); 623 for (kk=0; kk<sz; kk++) { 624 ierr = ISDestroy(&iss[kk]);CHKERRQ(ierr); 625 } 626 ierr = PetscFree(iss);CHKERRQ(ierr); 627 } 628 ASMLocalIDsArr[level] = NULL; 629 nASMBlocksArr[level] = 0; 630 } else { 631 ierr = PCSetType(subpc, PCSOR);CHKERRQ(ierr); 632 } 633 } 634 { 635 /* coarse grid */ 636 KSP smoother,*k2; PC subpc,pc2; PetscInt ii,first; 637 Mat Lmat = Aarr[(level=pc_gamg->Nlevels-1)]; lidx = 0; 638 ierr = PCMGGetSmoother(pc, lidx, &smoother);CHKERRQ(ierr); 639 ierr = KSPSetOperators(smoother, Lmat, Lmat);CHKERRQ(ierr); 640 if (!pc_gamg->use_parallel_coarse_grid_solver) { 641 ierr = KSPSetNormType(smoother, KSP_NORM_NONE);CHKERRQ(ierr); 642 ierr = KSPGetPC(smoother, &subpc);CHKERRQ(ierr); 643 ierr = PCSetType(subpc, PCBJACOBI);CHKERRQ(ierr); 644 ierr = PCSetUp(subpc);CHKERRQ(ierr); 645 ierr = PCBJacobiGetSubKSP(subpc,&ii,&first,&k2);CHKERRQ(ierr); 646 if (ii != 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"ii %D is not one",ii); 647 ierr = KSPGetPC(k2[0],&pc2);CHKERRQ(ierr); 648 ierr = PCSetType(pc2, PCLU);CHKERRQ(ierr); 649 ierr = PCFactorSetShiftType(pc2,MAT_SHIFT_INBLOCKS);CHKERRQ(ierr); 650 ierr = KSPSetTolerances(k2[0],PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,1);CHKERRQ(ierr); 651 ierr = KSPSetType(k2[0], KSPPREONLY);CHKERRQ(ierr); 652 /* This flag gets reset by PCBJacobiGetSubKSP(), but our BJacobi really does the same algorithm everywhere (and in 653 * fact, all but one process will have zero dofs), so we reset the flag to avoid having PCView_BJacobi attempt to 654 * view every subdomain as though they were different. */ 655 ((PC_BJacobi*)subpc->data)->same_local_solves = PETSC_TRUE; 656 } 657 } 658 659 /* should be called in PCSetFromOptions_GAMG(), but cannot be called prior to PCMGSetLevels() */ 660 ierr = PetscObjectOptionsBegin((PetscObject)pc);CHKERRQ(ierr); 661 ierr = PCSetFromOptions_MG(PetscOptionsObject,pc);CHKERRQ(ierr); 662 ierr = PetscOptionsEnd();CHKERRQ(ierr); 663 ierr = PCMGSetGalerkin(pc,PC_MG_GALERKIN_EXTERNAL);CHKERRQ(ierr); 664 665 /* clean up */ 666 for (level=1; level<pc_gamg->Nlevels; level++) { 667 ierr = MatDestroy(&Parr[level]);CHKERRQ(ierr); 668 ierr = MatDestroy(&Aarr[level]);CHKERRQ(ierr); 669 } 670 ierr = PCSetUp_MG(pc);CHKERRQ(ierr); 671 } else { 672 KSP smoother; 673 ierr = PetscInfo(pc,"One level solver used (system is seen as DD). Using default solver.\n");CHKERRQ(ierr); 674 ierr = PCMGGetSmoother(pc, 0, &smoother);CHKERRQ(ierr); 675 ierr = KSPSetOperators(smoother, Aarr[0], Aarr[0]);CHKERRQ(ierr); 676 ierr = KSPSetType(smoother, KSPPREONLY);CHKERRQ(ierr); 677 ierr = PCSetUp_MG(pc);CHKERRQ(ierr); 678 } 679 PetscFunctionReturn(0); 680 } 681 682 /* ------------------------------------------------------------------------- */ 683 /* 684 PCDestroy_GAMG - Destroys the private context for the GAMG preconditioner 685 that was created with PCCreate_GAMG(). 686 687 Input Parameter: 688 . pc - the preconditioner context 689 690 Application Interface Routine: PCDestroy() 691 */ 692 PetscErrorCode PCDestroy_GAMG(PC pc) 693 { 694 PetscErrorCode ierr; 695 PC_MG *mg = (PC_MG*)pc->data; 696 PC_GAMG *pc_gamg= (PC_GAMG*)mg->innerctx; 697 698 PetscFunctionBegin; 699 ierr = PCReset_GAMG(pc);CHKERRQ(ierr); 700 if (pc_gamg->ops->destroy) { 701 ierr = (*pc_gamg->ops->destroy)(pc);CHKERRQ(ierr); 702 } 703 ierr = PetscFree(pc_gamg->ops);CHKERRQ(ierr); 704 ierr = PetscFree(pc_gamg->gamg_type_name);CHKERRQ(ierr); 705 ierr = PetscFree(pc_gamg);CHKERRQ(ierr); 706 ierr = PCDestroy_MG(pc);CHKERRQ(ierr); 707 PetscFunctionReturn(0); 708 } 709 710 /*@ 711 PCGAMGSetProcEqLim - Set number of equations to aim for per process on the coarse grids via processor reduction. 712 713 Logically Collective on PC 714 715 Input Parameters: 716 + pc - the preconditioner context 717 - n - the number of equations 718 719 720 Options Database Key: 721 . -pc_gamg_process_eq_limit <limit> 722 723 Notes: 724 GAMG will reduce the number of MPI processes used directly on the coarse grids so that there are around <limit> equations on each process 725 that has degrees of freedom 726 727 Level: intermediate 728 729 Concepts: Unstructured multigrid preconditioner 730 731 .seealso: PCGAMGSetCoarseEqLim() 732 @*/ 733 PetscErrorCode PCGAMGSetProcEqLim(PC pc, PetscInt n) 734 { 735 PetscErrorCode ierr; 736 737 PetscFunctionBegin; 738 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 739 ierr = PetscTryMethod(pc,"PCGAMGSetProcEqLim_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr); 740 PetscFunctionReturn(0); 741 } 742 743 static PetscErrorCode PCGAMGSetProcEqLim_GAMG(PC pc, PetscInt n) 744 { 745 PC_MG *mg = (PC_MG*)pc->data; 746 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 747 748 PetscFunctionBegin; 749 if (n>0) pc_gamg->min_eq_proc = n; 750 PetscFunctionReturn(0); 751 } 752 753 /*@ 754 PCGAMGSetCoarseEqLim - Set maximum number of equations on coarsest grid. 755 756 Collective on PC 757 758 Input Parameters: 759 + pc - the preconditioner context 760 - n - maximum number of equations to aim for 761 762 Options Database Key: 763 . -pc_gamg_coarse_eq_limit <limit> 764 765 Level: intermediate 766 767 Concepts: Unstructured multigrid preconditioner 768 769 .seealso: PCGAMGSetProcEqLim() 770 @*/ 771 PetscErrorCode PCGAMGSetCoarseEqLim(PC pc, PetscInt n) 772 { 773 PetscErrorCode ierr; 774 775 PetscFunctionBegin; 776 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 777 ierr = PetscTryMethod(pc,"PCGAMGSetCoarseEqLim_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr); 778 PetscFunctionReturn(0); 779 } 780 781 static PetscErrorCode PCGAMGSetCoarseEqLim_GAMG(PC pc, PetscInt n) 782 { 783 PC_MG *mg = (PC_MG*)pc->data; 784 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 785 786 PetscFunctionBegin; 787 if (n>0) pc_gamg->coarse_eq_limit = n; 788 PetscFunctionReturn(0); 789 } 790 791 /*@ 792 PCGAMGSetRepartition - Repartition the degrees of freedom across the processors on the coarser grids 793 794 Collective on PC 795 796 Input Parameters: 797 + pc - the preconditioner context 798 - n - PETSC_TRUE or PETSC_FALSE 799 800 Options Database Key: 801 . -pc_gamg_repartition <true,false> 802 803 Notes: 804 this will generally improve the loading balancing of the work on each level 805 806 Level: intermediate 807 808 Concepts: Unstructured multigrid preconditioner 809 810 .seealso: () 811 @*/ 812 PetscErrorCode PCGAMGSetRepartition(PC pc, PetscBool n) 813 { 814 PetscErrorCode ierr; 815 816 PetscFunctionBegin; 817 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 818 ierr = PetscTryMethod(pc,"PCGAMGSetRepartition_C",(PC,PetscBool),(pc,n));CHKERRQ(ierr); 819 PetscFunctionReturn(0); 820 } 821 822 static PetscErrorCode PCGAMGSetRepartition_GAMG(PC pc, PetscBool n) 823 { 824 PC_MG *mg = (PC_MG*)pc->data; 825 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 826 827 PetscFunctionBegin; 828 pc_gamg->repart = n; 829 PetscFunctionReturn(0); 830 } 831 832 /*@ 833 PCGAMGSetReuseInterpolation - Reuse prolongation when rebuilding algebraic multigrid preconditioner 834 835 Collective on PC 836 837 Input Parameters: 838 + pc - the preconditioner context 839 - n - PETSC_TRUE or PETSC_FALSE 840 841 Options Database Key: 842 . -pc_gamg_reuse_interpolation <true,false> 843 844 Level: intermediate 845 846 Notes: 847 this may negatively affect the convergence rate of the method on new matrices if the matrix entries change a great deal, but allows 848 rebuilding the preconditioner quicker. 849 850 Concepts: Unstructured multigrid preconditioner 851 852 .seealso: () 853 @*/ 854 PetscErrorCode PCGAMGSetReuseInterpolation(PC pc, PetscBool n) 855 { 856 PetscErrorCode ierr; 857 858 PetscFunctionBegin; 859 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 860 ierr = PetscTryMethod(pc,"PCGAMGSetReuseInterpolation_C",(PC,PetscBool),(pc,n));CHKERRQ(ierr); 861 PetscFunctionReturn(0); 862 } 863 864 static PetscErrorCode PCGAMGSetReuseInterpolation_GAMG(PC pc, PetscBool n) 865 { 866 PC_MG *mg = (PC_MG*)pc->data; 867 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 868 869 PetscFunctionBegin; 870 pc_gamg->reuse_prol = n; 871 PetscFunctionReturn(0); 872 } 873 874 /*@ 875 PCGAMGASMSetUseAggs - Have the PCGAMG smoother on each level use the aggregates defined by the coarsening process as the subdomains for the additive Schwarz preconditioner. 876 877 Collective on PC 878 879 Input Parameters: 880 + pc - the preconditioner context 881 - flg - PETSC_TRUE to use aggregates, PETSC_FALSE to not 882 883 Options Database Key: 884 . -pc_gamg_asm_use_agg 885 886 Level: intermediate 887 888 Concepts: Unstructured multigrid preconditioner 889 890 .seealso: () 891 @*/ 892 PetscErrorCode PCGAMGASMSetUseAggs(PC pc, PetscBool flg) 893 { 894 PetscErrorCode ierr; 895 896 PetscFunctionBegin; 897 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 898 ierr = PetscTryMethod(pc,"PCGAMGASMSetUseAggs_C",(PC,PetscBool),(pc,flg));CHKERRQ(ierr); 899 PetscFunctionReturn(0); 900 } 901 902 static PetscErrorCode PCGAMGASMSetUseAggs_GAMG(PC pc, PetscBool flg) 903 { 904 PC_MG *mg = (PC_MG*)pc->data; 905 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 906 907 PetscFunctionBegin; 908 pc_gamg->use_aggs_in_asm = flg; 909 PetscFunctionReturn(0); 910 } 911 912 /*@ 913 PCGAMGSetUseParallelCoarseGridSolve - allow a parallel coarse grid solver 914 915 Collective on PC 916 917 Input Parameters: 918 + pc - the preconditioner context 919 - flg - PETSC_TRUE to not force coarse grid onto one processor 920 921 Options Database Key: 922 . -pc_gamg_use_parallel_coarse_grid_solver 923 924 Level: intermediate 925 926 Concepts: Unstructured multigrid preconditioner 927 928 .seealso: () 929 @*/ 930 PetscErrorCode PCGAMGSetUseParallelCoarseGridSolve(PC pc, PetscBool flg) 931 { 932 PetscErrorCode ierr; 933 934 PetscFunctionBegin; 935 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 936 ierr = PetscTryMethod(pc,"PCGAMGSetUseParallelCoarseGridSolve_C",(PC,PetscBool),(pc,flg));CHKERRQ(ierr); 937 PetscFunctionReturn(0); 938 } 939 940 static PetscErrorCode PCGAMGSetUseParallelCoarseGridSolve_GAMG(PC pc, PetscBool flg) 941 { 942 PC_MG *mg = (PC_MG*)pc->data; 943 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 944 945 PetscFunctionBegin; 946 pc_gamg->use_parallel_coarse_grid_solver = flg; 947 PetscFunctionReturn(0); 948 } 949 950 /*@ 951 PCGAMGSetNlevels - Sets the maximum number of levels PCGAMG will use 952 953 Not collective on PC 954 955 Input Parameters: 956 + pc - the preconditioner 957 - n - the maximum number of levels to use 958 959 Options Database Key: 960 . -pc_mg_levels 961 962 Level: intermediate 963 964 Concepts: Unstructured multigrid preconditioner 965 966 .seealso: () 967 @*/ 968 PetscErrorCode PCGAMGSetNlevels(PC pc, PetscInt n) 969 { 970 PetscErrorCode ierr; 971 972 PetscFunctionBegin; 973 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 974 ierr = PetscTryMethod(pc,"PCGAMGSetNlevels_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr); 975 PetscFunctionReturn(0); 976 } 977 978 static PetscErrorCode PCGAMGSetNlevels_GAMG(PC pc, PetscInt n) 979 { 980 PC_MG *mg = (PC_MG*)pc->data; 981 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 982 983 PetscFunctionBegin; 984 pc_gamg->Nlevels = n; 985 PetscFunctionReturn(0); 986 } 987 988 /*@ 989 PCGAMGSetThreshold - Relative threshold to use for dropping edges in aggregation graph 990 991 Not collective on PC 992 993 Input Parameters: 994 + pc - the preconditioner context 995 - threshold - the threshold value, 0.0 means keep all nonzero entries in the graph; negative means keep even zero entries in the graph 996 997 Options Database Key: 998 . -pc_gamg_threshold <threshold> 999 1000 Notes: 1001 Before aggregating the graph GAMG will remove small values from the graph thus reducing the coupling in the graph and a different 1002 (perhaps better) coarser set of points. 1003 1004 Level: intermediate 1005 1006 Concepts: Unstructured multigrid preconditioner 1007 1008 .seealso: PCGAMGFilterGraph() 1009 @*/ 1010 PetscErrorCode PCGAMGSetThreshold(PC pc, PetscReal v[], PetscInt n) 1011 { 1012 PetscErrorCode ierr; 1013 1014 PetscFunctionBegin; 1015 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1016 ierr = PetscTryMethod(pc,"PCGAMGSetThreshold_C",(PC,PetscReal[],PetscInt),(pc,v,n));CHKERRQ(ierr); 1017 PetscFunctionReturn(0); 1018 } 1019 1020 static PetscErrorCode PCGAMGSetThreshold_GAMG(PC pc, PetscReal v[], PetscInt n) 1021 { 1022 PC_MG *mg = (PC_MG*)pc->data; 1023 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 1024 PetscInt i; 1025 PetscFunctionBegin; 1026 for (i=0;i<n;i++) pc_gamg->threshold[i] = v[i]; 1027 do {pc_gamg->threshold[i] = pc_gamg->threshold[i-1]*pc_gamg->threshold_scale;} while (++i<PETSC_GAMG_MAXLEVELS); 1028 PetscFunctionReturn(0); 1029 } 1030 1031 /*@ 1032 PCGAMGSetThresholdScale - Relative threshold reduction at each level 1033 1034 Not collective on PC 1035 1036 Input Parameters: 1037 + pc - the preconditioner context 1038 - scale - the threshold value reduction, ussually < 1.0 1039 1040 Options Database Key: 1041 . -pc_gamg_threshold_scale <v> 1042 1043 Level: advanced 1044 1045 Concepts: Unstructured multigrid preconditioner 1046 1047 .seealso: () 1048 @*/ 1049 PetscErrorCode PCGAMGSetThresholdScale(PC pc, PetscReal v) 1050 { 1051 PetscErrorCode ierr; 1052 1053 PetscFunctionBegin; 1054 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1055 ierr = PetscTryMethod(pc,"PCGAMGSetThresholdScale_C",(PC,PetscReal),(pc,v));CHKERRQ(ierr); 1056 PetscFunctionReturn(0); 1057 } 1058 1059 static PetscErrorCode PCGAMGSetThresholdScale_GAMG(PC pc, PetscReal v) 1060 { 1061 PC_MG *mg = (PC_MG*)pc->data; 1062 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 1063 PetscFunctionBegin; 1064 pc_gamg->threshold_scale = v; 1065 PetscFunctionReturn(0); 1066 } 1067 1068 /*@C 1069 PCGAMGSetType - Set solution method 1070 1071 Collective on PC 1072 1073 Input Parameters: 1074 + pc - the preconditioner context 1075 - type - PCGAMGAGG, PCGAMGGEO, or PCGAMGCLASSICAL 1076 1077 Options Database Key: 1078 . -pc_gamg_type <agg,geo,classical> - type of algebraic multigrid to apply 1079 1080 Level: intermediate 1081 1082 Concepts: Unstructured multigrid preconditioner 1083 1084 .seealso: PCGAMGGetType(), PCGAMG, PCGAMGType 1085 @*/ 1086 PetscErrorCode PCGAMGSetType(PC pc, PCGAMGType type) 1087 { 1088 PetscErrorCode ierr; 1089 1090 PetscFunctionBegin; 1091 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1092 ierr = PetscTryMethod(pc,"PCGAMGSetType_C",(PC,PCGAMGType),(pc,type));CHKERRQ(ierr); 1093 PetscFunctionReturn(0); 1094 } 1095 1096 /*@C 1097 PCGAMGGetType - Get solution method 1098 1099 Collective on PC 1100 1101 Input Parameter: 1102 . pc - the preconditioner context 1103 1104 Output Parameter: 1105 . type - the type of algorithm used 1106 1107 Level: intermediate 1108 1109 Concepts: Unstructured multigrid preconditioner 1110 1111 .seealso: PCGAMGSetType(), PCGAMGType 1112 @*/ 1113 PetscErrorCode PCGAMGGetType(PC pc, PCGAMGType *type) 1114 { 1115 PetscErrorCode ierr; 1116 1117 PetscFunctionBegin; 1118 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1119 ierr = PetscUseMethod(pc,"PCGAMGGetType_C",(PC,PCGAMGType*),(pc,type));CHKERRQ(ierr); 1120 PetscFunctionReturn(0); 1121 } 1122 1123 static PetscErrorCode PCGAMGGetType_GAMG(PC pc, PCGAMGType *type) 1124 { 1125 PC_MG *mg = (PC_MG*)pc->data; 1126 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 1127 1128 PetscFunctionBegin; 1129 *type = pc_gamg->type; 1130 PetscFunctionReturn(0); 1131 } 1132 1133 static PetscErrorCode PCGAMGSetType_GAMG(PC pc, PCGAMGType type) 1134 { 1135 PetscErrorCode ierr,(*r)(PC); 1136 PC_MG *mg = (PC_MG*)pc->data; 1137 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 1138 1139 PetscFunctionBegin; 1140 pc_gamg->type = type; 1141 ierr = PetscFunctionListFind(GAMGList,type,&r);CHKERRQ(ierr); 1142 if (!r) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_UNKNOWN_TYPE,"Unknown GAMG type %s given",type); 1143 if (pc_gamg->ops->destroy) { 1144 ierr = (*pc_gamg->ops->destroy)(pc);CHKERRQ(ierr); 1145 ierr = PetscMemzero(pc_gamg->ops,sizeof(struct _PCGAMGOps));CHKERRQ(ierr); 1146 pc_gamg->ops->createlevel = PCGAMGCreateLevel_GAMG; 1147 /* cleaning up common data in pc_gamg - this should disapear someday */ 1148 pc_gamg->data_cell_cols = 0; 1149 pc_gamg->data_cell_rows = 0; 1150 pc_gamg->orig_data_cell_cols = 0; 1151 pc_gamg->orig_data_cell_rows = 0; 1152 ierr = PetscFree(pc_gamg->data);CHKERRQ(ierr); 1153 pc_gamg->data_sz = 0; 1154 } 1155 ierr = PetscFree(pc_gamg->gamg_type_name);CHKERRQ(ierr); 1156 ierr = PetscStrallocpy(type,&pc_gamg->gamg_type_name);CHKERRQ(ierr); 1157 ierr = (*r)(pc);CHKERRQ(ierr); 1158 PetscFunctionReturn(0); 1159 } 1160 1161 static PetscErrorCode PCView_GAMG(PC pc,PetscViewer viewer) 1162 { 1163 PetscErrorCode ierr,i; 1164 PC_MG *mg = (PC_MG*)pc->data; 1165 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 1166 1167 PetscFunctionBegin; 1168 ierr = PetscViewerASCIIPrintf(viewer," GAMG specific options\n");CHKERRQ(ierr); 1169 ierr = PetscViewerASCIIPrintf(viewer," Threshold for dropping small values in graph on each level =");CHKERRQ(ierr); 1170 for (i=0;i<pc_gamg->current_level;i++) { 1171 ierr = PetscViewerASCIIPrintf(viewer," %g",(double)pc_gamg->threshold[i]);CHKERRQ(ierr); 1172 } 1173 ierr = PetscViewerASCIIPrintf(viewer,"\n");CHKERRQ(ierr); 1174 ierr = PetscViewerASCIIPrintf(viewer," Threshold scaling factor for each level not specified = %g\n",(double)pc_gamg->threshold_scale);CHKERRQ(ierr); 1175 if (pc_gamg->use_aggs_in_asm) { 1176 ierr = PetscViewerASCIIPrintf(viewer," Using aggregates from coarsening process to define subdomains for PCASM\n");CHKERRQ(ierr); 1177 } 1178 if (pc_gamg->use_parallel_coarse_grid_solver) { 1179 ierr = PetscViewerASCIIPrintf(viewer," Using parallel coarse grid solver (all coarse grid equations not put on one process)\n");CHKERRQ(ierr); 1180 } 1181 if (pc_gamg->ops->view) { 1182 ierr = (*pc_gamg->ops->view)(pc,viewer);CHKERRQ(ierr); 1183 } 1184 PetscFunctionReturn(0); 1185 } 1186 1187 PetscErrorCode PCSetFromOptions_GAMG(PetscOptionItems *PetscOptionsObject,PC pc) 1188 { 1189 PetscErrorCode ierr; 1190 PC_MG *mg = (PC_MG*)pc->data; 1191 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 1192 PetscBool flag; 1193 MPI_Comm comm; 1194 char prefix[256]; 1195 PetscInt i,n; 1196 const char *pcpre; 1197 1198 PetscFunctionBegin; 1199 ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr); 1200 ierr = PetscOptionsHead(PetscOptionsObject,"GAMG options");CHKERRQ(ierr); 1201 { 1202 char tname[256]; 1203 ierr = PetscOptionsFList("-pc_gamg_type","Type of AMG method","PCGAMGSetType",GAMGList, pc_gamg->gamg_type_name, tname, sizeof(tname), &flag);CHKERRQ(ierr); 1204 if (flag) { 1205 ierr = PCGAMGSetType(pc,tname);CHKERRQ(ierr); 1206 } 1207 ierr = PetscOptionsBool("-pc_gamg_repartition","Repartion coarse grids","PCGAMGSetRepartition",pc_gamg->repart,&pc_gamg->repart,NULL);CHKERRQ(ierr); 1208 ierr = PetscOptionsBool("-pc_gamg_reuse_interpolation","Reuse prolongation operator","PCGAMGReuseInterpolation",pc_gamg->reuse_prol,&pc_gamg->reuse_prol,NULL);CHKERRQ(ierr); 1209 ierr = PetscOptionsBool("-pc_gamg_asm_use_agg","Use aggregation aggregates for ASM smoother","PCGAMGASMSetUseAggs",pc_gamg->use_aggs_in_asm,&pc_gamg->use_aggs_in_asm,NULL);CHKERRQ(ierr); 1210 ierr = PetscOptionsBool("-pc_gamg_use_parallel_coarse_grid_solver","Use parallel coarse grid solver (otherwise put last grid on one process)","PCGAMGSetUseParallelCoarseGridSolve",pc_gamg->use_parallel_coarse_grid_solver,&pc_gamg->use_parallel_coarse_grid_solver,NULL);CHKERRQ(ierr); 1211 ierr = PetscOptionsInt("-pc_gamg_process_eq_limit","Limit (goal) on number of equations per process on coarse grids","PCGAMGSetProcEqLim",pc_gamg->min_eq_proc,&pc_gamg->min_eq_proc,NULL);CHKERRQ(ierr); 1212 ierr = PetscOptionsInt("-pc_gamg_coarse_eq_limit","Limit on number of equations for the coarse grid","PCGAMGSetCoarseEqLim",pc_gamg->coarse_eq_limit,&pc_gamg->coarse_eq_limit,NULL);CHKERRQ(ierr); 1213 ierr = PetscOptionsReal("-pc_gamg_threshold_scale","Scaling of threshold for each level not specified","PCGAMGSetThresholdScale",pc_gamg->threshold_scale,&pc_gamg->threshold_scale,NULL);CHKERRQ(ierr); 1214 n = PETSC_GAMG_MAXLEVELS; 1215 ierr = PetscOptionsRealArray("-pc_gamg_threshold","Relative threshold to use for dropping edges in aggregation graph","PCGAMGSetThreshold",pc_gamg->threshold,&n,&flag);CHKERRQ(ierr); 1216 if (!flag || n < PETSC_GAMG_MAXLEVELS) { 1217 if (!flag) n = 1; 1218 i = n; 1219 do {pc_gamg->threshold[i] = pc_gamg->threshold[i-1]*pc_gamg->threshold_scale;} while (++i<PETSC_GAMG_MAXLEVELS); 1220 } 1221 ierr = PetscOptionsInt("-pc_mg_levels","Set number of MG levels","PCGAMGSetNlevels",pc_gamg->Nlevels,&pc_gamg->Nlevels,NULL);CHKERRQ(ierr); 1222 1223 /* set options for subtype */ 1224 if (pc_gamg->ops->setfromoptions) {ierr = (*pc_gamg->ops->setfromoptions)(PetscOptionsObject,pc);CHKERRQ(ierr);} 1225 } 1226 ierr = PCGetOptionsPrefix(pc, &pcpre);CHKERRQ(ierr); 1227 ierr = PetscSNPrintf(prefix,sizeof(prefix),"%spc_gamg_",pcpre ? pcpre : "");CHKERRQ(ierr); 1228 ierr = PetscOptionsTail();CHKERRQ(ierr); 1229 PetscFunctionReturn(0); 1230 } 1231 1232 /* -------------------------------------------------------------------------- */ 1233 /*MC 1234 PCGAMG - Geometric algebraic multigrid (AMG) preconditioner 1235 1236 Options Database Keys: 1237 + -pc_gamg_type <type> - one of agg, geo, or classical 1238 . -pc_gamg_repartition <true,default=false> - repartition the degrees of freedom accross the coarse grids as they are determined 1239 . -pc_gamg_reuse_interpolation <true,default=false> - when rebuilding the algebraic multigrid preconditioner reuse the previously computed interpolations 1240 . -pc_gamg_asm_use_agg <true,default=false> - use the aggregates from the coasening process to defined the subdomains on each level for the PCASM smoother 1241 . -pc_gamg_process_eq_limit <limit, default=50> - GAMG will reduce the number of MPI processes used directly on the coarse grids so that there are around <limit> 1242 equations on each process that has degrees of freedom 1243 . -pc_gamg_coarse_eq_limit <limit, default=50> - Set maximum number of equations on coarsest grid to aim for. 1244 . -pc_gamg_threshold[] <thresh,default=0> - Before aggregating the graph GAMG will remove small values from the graph on each level 1245 - -pc_gamg_threshold_scale <scale,default=1> - Scaling of threshold on each coarser grid if not specified 1246 1247 Options Database Keys for default Aggregation: 1248 + -pc_gamg_agg_nsmooths <nsmooth, default=1> - number of smoothing steps to use with smooth aggregation 1249 . -pc_gamg_sym_graph <true,default=false> - symmetrize the graph before computing the aggregation 1250 - -pc_gamg_square_graph <n,default=1> - number of levels to square the graph before aggregating it 1251 1252 Multigrid options: 1253 + -pc_mg_cycles <v> - v or w, see PCMGSetCycleType() 1254 . -pc_mg_distinct_smoothup - configure the up and down (pre and post) smoothers separately, see PCMGSetDistinctSmoothUp() 1255 . -pc_mg_type <multiplicative> - (one of) additive multiplicative full kascade 1256 - -pc_mg_levels <levels> - Number of levels of multigrid to use. 1257 1258 1259 Notes: 1260 In order to obtain good performance for PCGAMG for vector valued problems you must 1261 Call MatSetBlockSize() to indicate the number of degrees of freedom per grid point 1262 Call MatSetNearNullSpace() (or PCSetCoordinates() if solving the equations of elasticity) to indicate the near null space of the operator 1263 See the Users Manual Chapter 4 for more details 1264 1265 Level: intermediate 1266 1267 Concepts: algebraic multigrid 1268 1269 .seealso: PCCreate(), PCSetType(), MatSetBlockSize(), PCMGType, PCSetCoordinates(), MatSetNearNullSpace(), PCGAMGSetType(), PCGAMGAGG, PCGAMGGEO, PCGAMGCLASSICAL, PCGAMGSetProcEqLim(), 1270 PCGAMGSetCoarseEqLim(), PCGAMGSetRepartition(), PCGAMGRegister(), PCGAMGSetReuseInterpolation(), PCGAMGASMSetUseAggs(), PCGAMGSetUseParallelCoarseGridSolve(), PCGAMGSetNlevels(), PCGAMGSetThreshold(), PCGAMGGetType(), PCGAMGSetReuseInterpolation() 1271 M*/ 1272 1273 PETSC_EXTERN PetscErrorCode PCCreate_GAMG(PC pc) 1274 { 1275 PetscErrorCode ierr,i; 1276 PC_GAMG *pc_gamg; 1277 PC_MG *mg; 1278 1279 PetscFunctionBegin; 1280 /* register AMG type */ 1281 ierr = PCGAMGInitializePackage();CHKERRQ(ierr); 1282 1283 /* PCGAMG is an inherited class of PCMG. Initialize pc as PCMG */ 1284 ierr = PCSetType(pc, PCMG);CHKERRQ(ierr); 1285 ierr = PetscObjectChangeTypeName((PetscObject)pc, PCGAMG);CHKERRQ(ierr); 1286 1287 /* create a supporting struct and attach it to pc */ 1288 ierr = PetscNewLog(pc,&pc_gamg);CHKERRQ(ierr); 1289 ierr = PCMGSetGalerkin(pc,PC_MG_GALERKIN_EXTERNAL);CHKERRQ(ierr); 1290 mg = (PC_MG*)pc->data; 1291 mg->innerctx = pc_gamg; 1292 1293 ierr = PetscNewLog(pc,&pc_gamg->ops);CHKERRQ(ierr); 1294 1295 pc_gamg->setup_count = 0; 1296 /* these should be in subctx but repartitioning needs simple arrays */ 1297 pc_gamg->data_sz = 0; 1298 pc_gamg->data = 0; 1299 1300 /* overwrite the pointers of PCMG by the functions of base class PCGAMG */ 1301 pc->ops->setfromoptions = PCSetFromOptions_GAMG; 1302 pc->ops->setup = PCSetUp_GAMG; 1303 pc->ops->reset = PCReset_GAMG; 1304 pc->ops->destroy = PCDestroy_GAMG; 1305 mg->view = PCView_GAMG; 1306 1307 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetProcEqLim_C",PCGAMGSetProcEqLim_GAMG);CHKERRQ(ierr); 1308 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetCoarseEqLim_C",PCGAMGSetCoarseEqLim_GAMG);CHKERRQ(ierr); 1309 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetRepartition_C",PCGAMGSetRepartition_GAMG);CHKERRQ(ierr); 1310 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetReuseInterpolation_C",PCGAMGSetReuseInterpolation_GAMG);CHKERRQ(ierr); 1311 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGASMSetUseAggs_C",PCGAMGASMSetUseAggs_GAMG);CHKERRQ(ierr); 1312 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetUseParallelCoarseGridSolve_C",PCGAMGSetUseParallelCoarseGridSolve_GAMG);CHKERRQ(ierr); 1313 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetThreshold_C",PCGAMGSetThreshold_GAMG);CHKERRQ(ierr); 1314 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetThresholdScale_C",PCGAMGSetThresholdScale_GAMG);CHKERRQ(ierr); 1315 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetType_C",PCGAMGSetType_GAMG);CHKERRQ(ierr); 1316 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGGetType_C",PCGAMGGetType_GAMG);CHKERRQ(ierr); 1317 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetNlevels_C",PCGAMGSetNlevels_GAMG);CHKERRQ(ierr); 1318 pc_gamg->repart = PETSC_FALSE; 1319 pc_gamg->reuse_prol = PETSC_FALSE; 1320 pc_gamg->use_aggs_in_asm = PETSC_FALSE; 1321 pc_gamg->use_parallel_coarse_grid_solver = PETSC_FALSE; 1322 pc_gamg->min_eq_proc = 50; 1323 pc_gamg->coarse_eq_limit = 50; 1324 for (i=0;i<PETSC_GAMG_MAXLEVELS;i++) pc_gamg->threshold[i] = 0.; 1325 pc_gamg->threshold_scale = 1.; 1326 pc_gamg->Nlevels = PETSC_GAMG_MAXLEVELS; 1327 pc_gamg->current_level = 0; /* don't need to init really */ 1328 pc_gamg->ops->createlevel = PCGAMGCreateLevel_GAMG; 1329 1330 /* PCSetUp_GAMG assumes that the type has been set, so set it to the default now */ 1331 ierr = PCGAMGSetType(pc,PCGAMGAGG);CHKERRQ(ierr); 1332 PetscFunctionReturn(0); 1333 } 1334 1335 /*@C 1336 PCGAMGInitializePackage - This function initializes everything in the PCGAMG package. It is called 1337 from PetscDLLibraryRegister() when using dynamic libraries, and on the first call to PCCreate_GAMG() 1338 when using static libraries. 1339 1340 Level: developer 1341 1342 .keywords: PC, PCGAMG, initialize, package 1343 .seealso: PetscInitialize() 1344 @*/ 1345 PetscErrorCode PCGAMGInitializePackage(void) 1346 { 1347 PetscErrorCode ierr; 1348 1349 PetscFunctionBegin; 1350 if (PCGAMGPackageInitialized) PetscFunctionReturn(0); 1351 PCGAMGPackageInitialized = PETSC_TRUE; 1352 ierr = PetscFunctionListAdd(&GAMGList,PCGAMGGEO,PCCreateGAMG_GEO);CHKERRQ(ierr); 1353 ierr = PetscFunctionListAdd(&GAMGList,PCGAMGAGG,PCCreateGAMG_AGG);CHKERRQ(ierr); 1354 ierr = PetscFunctionListAdd(&GAMGList,PCGAMGCLASSICAL,PCCreateGAMG_Classical);CHKERRQ(ierr); 1355 ierr = PetscRegisterFinalize(PCGAMGFinalizePackage);CHKERRQ(ierr); 1356 1357 /* general events */ 1358 ierr = PetscLogEventRegister("PCGAMGGraph_AGG", 0, &PC_GAMGGraph_AGG);CHKERRQ(ierr); 1359 ierr = PetscLogEventRegister("PCGAMGGraph_GEO", PC_CLASSID, &PC_GAMGGraph_GEO);CHKERRQ(ierr); 1360 ierr = PetscLogEventRegister("PCGAMGCoarse_AGG", PC_CLASSID, &PC_GAMGCoarsen_AGG);CHKERRQ(ierr); 1361 ierr = PetscLogEventRegister("PCGAMGCoarse_GEO", PC_CLASSID, &PC_GAMGCoarsen_GEO);CHKERRQ(ierr); 1362 ierr = PetscLogEventRegister("PCGAMGProl_AGG", PC_CLASSID, &PC_GAMGProlongator_AGG);CHKERRQ(ierr); 1363 ierr = PetscLogEventRegister("PCGAMGProl_GEO", PC_CLASSID, &PC_GAMGProlongator_GEO);CHKERRQ(ierr); 1364 ierr = PetscLogEventRegister("PCGAMGPOpt_AGG", PC_CLASSID, &PC_GAMGOptProlongator_AGG);CHKERRQ(ierr); 1365 1366 #if defined PETSC_GAMG_USE_LOG 1367 ierr = PetscLogEventRegister("GAMG: createProl", PC_CLASSID, &petsc_gamg_setup_events[SET1]);CHKERRQ(ierr); 1368 ierr = PetscLogEventRegister(" Graph", PC_CLASSID, &petsc_gamg_setup_events[GRAPH]);CHKERRQ(ierr); 1369 /* PetscLogEventRegister(" G.Mat", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_MAT]); */ 1370 /* PetscLogEventRegister(" G.Filter", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_FILTER]); */ 1371 /* PetscLogEventRegister(" G.Square", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_SQR]); */ 1372 ierr = PetscLogEventRegister(" MIS/Agg", PC_CLASSID, &petsc_gamg_setup_events[SET4]);CHKERRQ(ierr); 1373 ierr = PetscLogEventRegister(" geo: growSupp", PC_CLASSID, &petsc_gamg_setup_events[SET5]);CHKERRQ(ierr); 1374 ierr = PetscLogEventRegister(" geo: triangle", PC_CLASSID, &petsc_gamg_setup_events[SET6]);CHKERRQ(ierr); 1375 ierr = PetscLogEventRegister(" search-set", PC_CLASSID, &petsc_gamg_setup_events[FIND_V]);CHKERRQ(ierr); 1376 ierr = PetscLogEventRegister(" SA: col data", PC_CLASSID, &petsc_gamg_setup_events[SET7]);CHKERRQ(ierr); 1377 ierr = PetscLogEventRegister(" SA: frmProl0", PC_CLASSID, &petsc_gamg_setup_events[SET8]);CHKERRQ(ierr); 1378 ierr = PetscLogEventRegister(" SA: smooth", PC_CLASSID, &petsc_gamg_setup_events[SET9]);CHKERRQ(ierr); 1379 ierr = PetscLogEventRegister("GAMG: partLevel", PC_CLASSID, &petsc_gamg_setup_events[SET2]);CHKERRQ(ierr); 1380 ierr = PetscLogEventRegister(" repartition", PC_CLASSID, &petsc_gamg_setup_events[SET12]);CHKERRQ(ierr); 1381 ierr = PetscLogEventRegister(" Invert-Sort", PC_CLASSID, &petsc_gamg_setup_events[SET13]);CHKERRQ(ierr); 1382 ierr = PetscLogEventRegister(" Move A", PC_CLASSID, &petsc_gamg_setup_events[SET14]);CHKERRQ(ierr); 1383 ierr = PetscLogEventRegister(" Move P", PC_CLASSID, &petsc_gamg_setup_events[SET15]);CHKERRQ(ierr); 1384 1385 /* PetscLogEventRegister(" PL move data", PC_CLASSID, &petsc_gamg_setup_events[SET13]); */ 1386 /* PetscLogEventRegister("GAMG: fix", PC_CLASSID, &petsc_gamg_setup_events[SET10]); */ 1387 /* PetscLogEventRegister("GAMG: set levels", PC_CLASSID, &petsc_gamg_setup_events[SET11]); */ 1388 /* create timer stages */ 1389 #if defined GAMG_STAGES 1390 { 1391 char str[32]; 1392 PetscInt lidx; 1393 sprintf(str,"MG Level %d (finest)",0); 1394 ierr = PetscLogStageRegister(str, &gamg_stages[0]);CHKERRQ(ierr); 1395 for (lidx=1; lidx<9; lidx++) { 1396 sprintf(str,"MG Level %d",lidx); 1397 ierr = PetscLogStageRegister(str, &gamg_stages[lidx]);CHKERRQ(ierr); 1398 } 1399 } 1400 #endif 1401 #endif 1402 PetscFunctionReturn(0); 1403 } 1404 1405 /*@C 1406 PCGAMGFinalizePackage - This function frees everything from the PCGAMG package. It is 1407 called from PetscFinalize() automatically. 1408 1409 Level: developer 1410 1411 .keywords: Petsc, destroy, package 1412 .seealso: PetscFinalize() 1413 @*/ 1414 PetscErrorCode PCGAMGFinalizePackage(void) 1415 { 1416 PetscErrorCode ierr; 1417 1418 PetscFunctionBegin; 1419 PCGAMGPackageInitialized = PETSC_FALSE; 1420 ierr = PetscFunctionListDestroy(&GAMGList);CHKERRQ(ierr); 1421 PetscFunctionReturn(0); 1422 } 1423 1424 /*@C 1425 PCGAMGRegister - Register a PCGAMG implementation. 1426 1427 Input Parameters: 1428 + type - string that will be used as the name of the GAMG type. 1429 - create - function for creating the gamg context. 1430 1431 Level: advanced 1432 1433 .seealso: PCGAMGType, PCGAMG, PCGAMGSetType() 1434 @*/ 1435 PetscErrorCode PCGAMGRegister(PCGAMGType type, PetscErrorCode (*create)(PC)) 1436 { 1437 PetscErrorCode ierr; 1438 1439 PetscFunctionBegin; 1440 ierr = PCGAMGInitializePackage();CHKERRQ(ierr); 1441 ierr = PetscFunctionListAdd(&GAMGList,type,create);CHKERRQ(ierr); 1442 PetscFunctionReturn(0); 1443 } 1444 1445