/*
 GAMG geometric-algebraic multigrid PC - Mark Adams 2011
 */
#include <petsc/private/matimpl.h>
#include <../src/ksp/pc/impls/gamg/gamg.h>        /*I "petscpc.h" I*/
#include <../src/ksp/pc/impls/bjacobi/bjacobi.h>  /* Hack to access same_local_solves */

#if defined PETSC_GAMG_USE_LOG
PetscLogEvent petsc_gamg_setup_events[NUM_SET];
#endif

#if defined PETSC_USE_LOG
PetscLogEvent PC_GAMGGraph_AGG;
PetscLogEvent PC_GAMGGraph_GEO;
PetscLogEvent PC_GAMGCoarsen_AGG;
PetscLogEvent PC_GAMGCoarsen_GEO;
PetscLogEvent PC_GAMGProlongator_AGG;
PetscLogEvent PC_GAMGProlongator_GEO;
PetscLogEvent PC_GAMGOptProlongator_AGG;
#endif

/* #define GAMG_STAGES */
#if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES)
static PetscLogStage gamg_stages[PETSC_GAMG_MAXLEVELS];
#endif

static PetscFunctionList GAMGList = 0;
static PetscBool PCGAMGPackageInitialized;

/* ----------------------------------------------------------------------------- */
#undef __FUNCT__
#define __FUNCT__ "PCReset_GAMG"
PetscErrorCode PCReset_GAMG(PC pc)
{
  PetscErrorCode ierr;
  PC_MG          *mg      = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  if (pc_gamg->data) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_PLIB,"This should not happen, cleaned up in SetUp\n");
  pc_gamg->data_sz = 0;
  ierr = PetscFree(pc_gamg->orig_data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------- */
/*
   PCGAMGCreateLevel_GAMG: create the coarse operator with RAP; repartition and/or reduce the number
     of active processes.

   Input Parameters:
.  pc - parameters + side effect: coarse data in 'pc_gamg->data' and
        'pc_gamg->data_sz' are changed via repartitioning/reduction
.  Amat_fine - matrix on this fine (k) level
.  cr_bs - coarse block size

   In/Output Parameters:
.  a_P_inout - prolongation operator to the next level (k --> k-1)
.  a_nactive_proc - number of active processes

   Output Parameter:
.  a_Amat_crs - the coarse matrix that is created (k-1)
*/
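/*
   Background sketch (illustrative only): the coarse operator built below is the Galerkin
   (RAP) product A_{k-1} = P^T A_k P.  In PETSc this triple product is computed in a single
   call, e.g. for a fine matrix 'A' and prolongator 'P' (names here are placeholders):

       Mat Ac;
       ierr = MatPtAP(A, P, MAT_INITIAL_MATRIX, 2.0, &Ac);CHKERRQ(ierr);

   where the scalar argument (2.0) is the fill estimate for the result, as used below.
*/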
#undef __FUNCT__
#define __FUNCT__ "PCGAMGCreateLevel_GAMG"
static PetscErrorCode PCGAMGCreateLevel_GAMG(PC pc,Mat Amat_fine,PetscInt cr_bs,Mat *a_P_inout,Mat *a_Amat_crs,PetscMPIInt *a_nactive_proc,IS *Pcolumnperm,PetscBool is_last)
{
  PetscErrorCode ierr;
  PC_MG          *mg      = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
  Mat            Cmat,Pold=*a_P_inout;
  MPI_Comm       comm;
  PetscMPIInt    rank,size,new_size,nactive=*a_nactive_proc;
  PetscInt       ncrs_eq,ncrs,f_bs;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)Amat_fine,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MatGetBlockSize(Amat_fine, &f_bs);CHKERRQ(ierr);
  ierr = MatPtAP(Amat_fine, Pold, MAT_INITIAL_MATRIX, 2.0, &Cmat);CHKERRQ(ierr);

  /* set 'ncrs' (nodes), 'ncrs_eq' (equations) */
  ierr = MatGetLocalSize(Cmat, &ncrs_eq, NULL);CHKERRQ(ierr);
  if (pc_gamg->data_cell_rows>0) {
    ncrs = pc_gamg->data_sz/pc_gamg->data_cell_cols/pc_gamg->data_cell_rows;
  } else {
    PetscInt bs;
    ierr = MatGetBlockSize(Cmat, &bs);CHKERRQ(ierr);
    ncrs = ncrs_eq/bs;
  }

  /* get number of PEs to make active 'new_size', reduce, can be any integer 1-P */
  if (is_last && !pc_gamg->use_parallel_coarse_grid_solver) new_size = 1;
  else {
    PetscInt ncrs_eq_glob;
    ierr     = MatGetSize(Cmat, &ncrs_eq_glob, NULL);CHKERRQ(ierr);
    new_size = (PetscMPIInt)((float)ncrs_eq_glob/(float)pc_gamg->min_eq_proc + 0.5); /* hardwire min. number of eq/proc */
    if (!new_size) new_size = 1;                      /* not likely, possible? */
    else if (new_size >= nactive) new_size = nactive; /* no change, rare */
  }

  if (Pcolumnperm) *Pcolumnperm = NULL;

  if (!pc_gamg->repart && new_size==nactive) *a_Amat_crs = Cmat; /* output - no repartitioning or reduction - could bail here */
  else {
    PetscInt *counts,*newproc_idx,ii,jj,kk,strideNew,*tidx,ncrs_new,ncrs_eq_new,nloc_old;
    IS       is_eq_newproc,is_eq_num,is_eq_num_prim,new_eq_indices;

    nloc_old = ncrs_eq/cr_bs;
    if (ncrs_eq % cr_bs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"ncrs_eq %D not divisible by cr_bs %D",ncrs_eq,cr_bs);
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET12],0,0,0,0);CHKERRQ(ierr);
#endif
    /* make 'is_eq_newproc' */
    ierr = PetscMalloc1(size, &counts);CHKERRQ(ierr);
    if (pc_gamg->repart) {
      /* Repartition Cmat_{k} and move columns of P^{k}_{k-1} and coordinates of primal part accordingly */
      Mat adj;

      ierr = PetscInfo3(pc,"Repartition: size (active): %D --> %D, %D local equations\n",*a_nactive_proc,new_size,ncrs_eq);CHKERRQ(ierr);

      /* get 'adj' */
      if (cr_bs == 1) {
        ierr = MatConvert(Cmat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj);CHKERRQ(ierr);
      } else {
        /* make a scalar matrix to partition (no Stokes here) */
        Mat               tMat;
        PetscInt          Istart_crs,Iend_crs,ncols,jj,Ii;
        const PetscScalar *vals;
        const PetscInt    *idx;
        PetscInt          *d_nnz, *o_nnz, M, N;
        static PetscInt   llev = 0;
        MatType           mtype;

        ierr = PetscMalloc2(ncrs, &d_nnz,ncrs, &o_nnz);CHKERRQ(ierr);
        ierr = MatGetOwnershipRange(Cmat, &Istart_crs, &Iend_crs);CHKERRQ(ierr);
        ierr = MatGetSize(Cmat, &M, &N);CHKERRQ(ierr);
        for (Ii = Istart_crs, jj = 0; Ii < Iend_crs; Ii += cr_bs, jj++) {
          ierr = MatGetRow(Cmat,Ii,&ncols,0,0);CHKERRQ(ierr);
          d_nnz[jj] = ncols/cr_bs;
          o_nnz[jj] = ncols/cr_bs;
          ierr = MatRestoreRow(Cmat,Ii,&ncols,0,0);CHKERRQ(ierr);
          if (d_nnz[jj] > ncrs) d_nnz[jj] = ncrs;
          if (o_nnz[jj] > (M/cr_bs-ncrs)) o_nnz[jj] = M/cr_bs-ncrs;
        }

        ierr = MatGetType(Amat_fine,&mtype);CHKERRQ(ierr);
        ierr = MatCreate(comm, &tMat);CHKERRQ(ierr);
        ierr = MatSetSizes(tMat, ncrs, ncrs, PETSC_DETERMINE, PETSC_DETERMINE);CHKERRQ(ierr);
        ierr = MatSetType(tMat,mtype);CHKERRQ(ierr);
        ierr = MatSeqAIJSetPreallocation(tMat,0,d_nnz);CHKERRQ(ierr);
        ierr = MatMPIAIJSetPreallocation(tMat,0,d_nnz,0,o_nnz);CHKERRQ(ierr);
        ierr = PetscFree2(d_nnz,o_nnz);CHKERRQ(ierr);

        for (ii = Istart_crs; ii < Iend_crs; ii++) {
          PetscInt dest_row = ii/cr_bs;
          ierr = MatGetRow(Cmat,ii,&ncols,&idx,&vals);CHKERRQ(ierr);
          for (jj = 0; jj < ncols; jj++) {
            PetscInt    dest_col = idx[jj]/cr_bs;
            PetscScalar v        = 1.0;
            ierr = MatSetValues(tMat,1,&dest_row,1,&dest_col,&v,ADD_VALUES);CHKERRQ(ierr);
          }
          ierr = MatRestoreRow(Cmat,ii,&ncols,&idx,&vals);CHKERRQ(ierr);
        }
        ierr = MatAssemblyBegin(tMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
        ierr = MatAssemblyEnd(tMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

        if (llev++ == -1) { /* debugging: dump the partitioning matrix (disabled) */
          PetscViewer viewer; char fname[32];
          ierr = PetscSNPrintf(fname,sizeof(fname),"part_mat_%D.mat",llev);CHKERRQ(ierr);
          ierr = PetscViewerBinaryOpen(comm,fname,FILE_MODE_WRITE,&viewer);CHKERRQ(ierr);
          ierr = MatView(tMat, viewer);CHKERRQ(ierr);
          ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
        }
        ierr = MatConvert(tMat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj);CHKERRQ(ierr);
        ierr = MatDestroy(&tMat);CHKERRQ(ierr);
      } /* create 'adj' */

      { /* partition: get newproc_idx */
        char            prefix[256];
        const char      *pcpre;
        const PetscInt  *is_idx;
        MatPartitioning mpart;
        IS              proc_is;
        PetscInt        targetPE;

        ierr = MatPartitioningCreate(comm, &mpart);CHKERRQ(ierr);
        ierr = MatPartitioningSetAdjacency(mpart, adj);CHKERRQ(ierr);
        ierr = PCGetOptionsPrefix(pc, &pcpre);CHKERRQ(ierr);
        ierr = PetscSNPrintf(prefix,sizeof(prefix),"%spc_gamg_",pcpre ? pcpre : "");CHKERRQ(ierr);
        ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
        ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr);
        ierr = MatPartitioningSetNParts(mpart, new_size);CHKERRQ(ierr);
        ierr = MatPartitioningApply(mpart, &proc_is);CHKERRQ(ierr);
        ierr = MatPartitioningDestroy(&mpart);CHKERRQ(ierr);

        /* collect IS info */
        ierr     = PetscMalloc1(ncrs_eq, &newproc_idx);CHKERRQ(ierr);
        ierr     = ISGetIndices(proc_is, &is_idx);CHKERRQ(ierr);
        targetPE = 1; /* bring to "front" of machine */
        /*targetPE = size/new_size;*/ /* spread partitioning across machine */
        for (kk = jj = 0 ; kk < nloc_old ; kk++) {
          for (ii = 0 ; ii < cr_bs ; ii++, jj++) {
            newproc_idx[jj] = is_idx[kk] * targetPE; /* distribution */
          }
        }
        ierr = ISRestoreIndices(proc_is, &is_idx);CHKERRQ(ierr);
        ierr = ISDestroy(&proc_is);CHKERRQ(ierr);
      }
      ierr = MatDestroy(&adj);CHKERRQ(ierr);

      ierr = ISCreateGeneral(comm, ncrs_eq, newproc_idx, PETSC_COPY_VALUES, &is_eq_newproc);CHKERRQ(ierr);
      ierr = PetscFree(newproc_idx);CHKERRQ(ierr);
    } else { /* simple aggregation of parts -- 'is_eq_newproc' */
      PetscInt rfactor,targetPE;

      /* find a reduction factor that divides the process count evenly */
      if (new_size == 1) rfactor = size; /* easy */
      else {
        PetscReal best_fact = 0.;
        jj = -1;
        for (kk = 1 ; kk <= size ; kk++) {
          if (!(size%kk)) { /* a candidate */
            PetscReal nactpe = (PetscReal)size/(PetscReal)kk, fact = nactpe/(PetscReal)new_size;
            if (fact > 1.0) fact = 1./fact; /* keep fact < 1 */
            if (fact > best_fact) {
              best_fact = fact; jj = kk;
            }
          }
        }
        if (jj != -1) rfactor = jj;
        else rfactor = 1; /* does this happen .. a prime? */
      }
      new_size = size/rfactor;

      if (new_size==nactive) {
        *a_Amat_crs = Cmat; /* output - no repartitioning or reduction, bail out because nested here */
        ierr        = PetscFree(counts);CHKERRQ(ierr);
        ierr        = PetscInfo2(pc,"Aggregate processors noop: new_size=%D, neq(loc)=%D\n",new_size,ncrs_eq);CHKERRQ(ierr);
#if defined PETSC_GAMG_USE_LOG
        ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET12],0,0,0,0);CHKERRQ(ierr);
#endif
        PetscFunctionReturn(0);
      }

      ierr     = PetscInfo1(pc,"Number of equations (loc) %D with simple aggregation\n",ncrs_eq);CHKERRQ(ierr);
      targetPE = rank/rfactor;
      ierr     = ISCreateStride(comm, ncrs_eq, targetPE, 0, &is_eq_newproc);CHKERRQ(ierr);
    } /* end simple 'is_eq_newproc' */
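    /*
       Worked example of the factor search above (a sketch, not executed): with size=16
       processes and a target new_size=3, the divisors kk of 16 give nactpe = 16, 8, 4, 2, 1
       and normalized fits fact = 0.19, 0.38, 0.75, 0.67, 0.33; the best fit (0.75) selects
       rfactor=4, so new_size = 16/4 = 4 and rank r maps to target process r/4.
    */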
    /*
      Create an index set from the is_eq_newproc index set to indicate the mapping TO
    */
    ierr = ISPartitioningToNumbering(is_eq_newproc, &is_eq_num);CHKERRQ(ierr);
    is_eq_num_prim = is_eq_num;
    /*
      Determine how many equations/vertices are assigned to each processor
    */
    ierr        = ISPartitioningCount(is_eq_newproc, size, counts);CHKERRQ(ierr);
    ncrs_eq_new = counts[rank];
    ierr        = ISDestroy(&is_eq_newproc);CHKERRQ(ierr);
    ncrs_new    = ncrs_eq_new/cr_bs; /* nodes */

    ierr = PetscFree(counts);CHKERRQ(ierr);
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET12],0,0,0,0);CHKERRQ(ierr);
#endif
    /* data movement scope -- this could be moved to subclasses so that we don't try to cram all auxiliary data into some complex abstracted thing */
    {
      Vec            src_crd, dest_crd;
      const PetscInt *idx,ndata_rows=pc_gamg->data_cell_rows,ndata_cols=pc_gamg->data_cell_cols,node_data_sz=ndata_rows*ndata_cols;
      VecScatter     vecscat;
      PetscScalar    *array;
      IS             isscat;

      /* move data (for primal equations only) */
      /* Create a vector to contain the newly ordered element information */
      ierr = VecCreate(comm, &dest_crd);CHKERRQ(ierr);
      ierr = VecSetSizes(dest_crd, node_data_sz*ncrs_new, PETSC_DECIDE);CHKERRQ(ierr);
      ierr = VecSetType(dest_crd,VECSTANDARD);CHKERRQ(ierr); /* this is needed! */
      /*
        There are 'ndata_rows*ndata_cols' data items per node (one can think of the vectors as having
        a block size of ...). Note, ISs are expanded into equation space by 'cr_bs'.
      */
      ierr = PetscMalloc1(ncrs*node_data_sz, &tidx);CHKERRQ(ierr);
      ierr = ISGetIndices(is_eq_num_prim, &idx);CHKERRQ(ierr);
      for (ii=0,jj=0; ii<ncrs; ii++) {
        PetscInt id = idx[ii*cr_bs]/cr_bs; /* get node back */
        for (kk=0; kk<node_data_sz; kk++, jj++) tidx[jj] = id*node_data_sz + kk;
      }
      ierr = ISRestoreIndices(is_eq_num_prim, &idx);CHKERRQ(ierr);
      ierr = ISCreateGeneral(comm, node_data_sz*ncrs, tidx, PETSC_COPY_VALUES, &isscat);CHKERRQ(ierr);
      ierr = PetscFree(tidx);CHKERRQ(ierr);
      /*
        Create a vector to contain the original vertex information for each element
      */
      ierr = VecCreateSeq(PETSC_COMM_SELF, node_data_sz*ncrs, &src_crd);CHKERRQ(ierr);
      for (jj=0; jj<ndata_cols; jj++) {
        const PetscInt stride0=ncrs*pc_gamg->data_cell_rows;
        for (ii=0; ii<ncrs; ii++) {
          for (kk=0; kk<ndata_rows; kk++) {
            PetscInt    ix = ii*ndata_rows + kk + jj*stride0, jx = ii*node_data_sz + kk*ndata_cols + jj;
            PetscScalar tt = (PetscScalar)pc_gamg->data[ix];
            ierr = VecSetValues(src_crd, 1, &jx, &tt, INSERT_VALUES);CHKERRQ(ierr);
          }
        }
      }
      ierr = VecAssemblyBegin(src_crd);CHKERRQ(ierr);
      ierr = VecAssemblyEnd(src_crd);CHKERRQ(ierr);
      /*
        Scatter the element vertex information (still in the original vertex ordering)
        to the correct processor
      */
      ierr = VecScatterCreate(src_crd, NULL, dest_crd, isscat, &vecscat);CHKERRQ(ierr);
      ierr = ISDestroy(&isscat);CHKERRQ(ierr);
      ierr = VecScatterBegin(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
      ierr = VecScatterEnd(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
      ierr = VecScatterDestroy(&vecscat);CHKERRQ(ierr);
      ierr = VecDestroy(&src_crd);CHKERRQ(ierr);
      /*
        Put the element vertex data into a new allocation of the gdata->ele
      */
      ierr = PetscFree(pc_gamg->data);CHKERRQ(ierr);
      ierr = PetscMalloc1(node_data_sz*ncrs_new, &pc_gamg->data);CHKERRQ(ierr);

      pc_gamg->data_sz = node_data_sz*ncrs_new;
      strideNew        = ncrs_new*ndata_rows;

      ierr = VecGetArray(dest_crd, &array);CHKERRQ(ierr);
      for (jj=0; jj<ndata_cols; jj++) {
        for (ii=0; ii<ncrs_new; ii++) {
          for (kk=0; kk<ndata_rows; kk++) {
            PetscInt ix = ii*ndata_rows + kk + jj*strideNew, jx = ii*node_data_sz + kk*ndata_cols + jj;
            pc_gamg->data[ix] = PetscRealPart(array[jx]);
          }
        }
      }
      ierr = VecRestoreArray(dest_crd, &array);CHKERRQ(ierr);
      ierr = VecDestroy(&dest_crd);CHKERRQ(ierr);
    }
    /* move A and P (columns) with new layout */
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET13],0,0,0,0);CHKERRQ(ierr);
#endif

    /*
      Invert for MatGetSubMatrix
    */
    ierr = ISInvertPermutation(is_eq_num, ncrs_eq_new, &new_eq_indices);CHKERRQ(ierr);
    ierr = ISSort(new_eq_indices);CHKERRQ(ierr); /* is this needed? */
    ierr = ISSetBlockSize(new_eq_indices, cr_bs);CHKERRQ(ierr);
    if (is_eq_num != is_eq_num_prim) {
      ierr = ISDestroy(&is_eq_num_prim);CHKERRQ(ierr); /* could be same as 'is_eq_num' */
    }
    if (Pcolumnperm) {
      ierr = PetscObjectReference((PetscObject)new_eq_indices);CHKERRQ(ierr);
      *Pcolumnperm = new_eq_indices;
    }
    ierr = ISDestroy(&is_eq_num);CHKERRQ(ierr);
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET13],0,0,0,0);CHKERRQ(ierr);
    ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET14],0,0,0,0);CHKERRQ(ierr);
#endif
    /* 'a_Amat_crs' output */
    {
      Mat mat;
      ierr        = MatGetSubMatrix(Cmat, new_eq_indices, new_eq_indices, MAT_INITIAL_MATRIX, &mat);CHKERRQ(ierr);
      *a_Amat_crs = mat;
    }
    ierr = MatDestroy(&Cmat);CHKERRQ(ierr);

#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET14],0,0,0,0);CHKERRQ(ierr);
#endif
    /* prolongator */
    {
      IS       findices;
      PetscInt Istart,Iend;
      Mat      Pnew;

      ierr = MatGetOwnershipRange(Pold, &Istart, &Iend);CHKERRQ(ierr);
#if defined PETSC_GAMG_USE_LOG
      ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET15],0,0,0,0);CHKERRQ(ierr);
#endif
      ierr = ISCreateStride(comm,Iend-Istart,Istart,1,&findices);CHKERRQ(ierr);
      ierr = ISSetBlockSize(findices,f_bs);CHKERRQ(ierr);
      ierr = MatGetSubMatrix(Pold, findices, new_eq_indices, MAT_INITIAL_MATRIX, &Pnew);CHKERRQ(ierr);
      ierr = ISDestroy(&findices);CHKERRQ(ierr);

#if defined PETSC_GAMG_USE_LOG
      ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET15],0,0,0,0);CHKERRQ(ierr);
#endif
      ierr = MatDestroy(a_P_inout);CHKERRQ(ierr);

      /* output - repartitioned */
      *a_P_inout = Pnew;
    }
    ierr = ISDestroy(&new_eq_indices);CHKERRQ(ierr);

    *a_nactive_proc = new_size; /* output */
  }
  PetscFunctionReturn(0);
}
/* -------------------------------------------------------------------------- */
/*
   PCSetUp_GAMG - Prepares for the use of the GAMG preconditioner
                  by setting data structures and options.

   Input Parameter:
.  pc - the preconditioner context

*/
#undef __FUNCT__
#define __FUNCT__ "PCSetUp_GAMG"
PetscErrorCode PCSetUp_GAMG(PC pc)
{
  PetscErrorCode ierr;
  PC_MG          *mg      = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
  Mat            Pmat     = pc->pmat;
  PetscInt       fine_level,level,level1,bs,M,N,qq,lidx,nASMBlocksArr[PETSC_GAMG_MAXLEVELS];
  MPI_Comm       comm;
  PetscMPIInt    rank,size,nactivepe;
  Mat            Aarr[PETSC_GAMG_MAXLEVELS],Parr[PETSC_GAMG_MAXLEVELS];
  IS             *ASMLocalIDsArr[PETSC_GAMG_MAXLEVELS];
  PetscLogDouble nnz0=0.,nnztot=0.;
  MatInfo        info;
  PetscBool      is_last = PETSC_FALSE;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);

  if (pc_gamg->setup_count++ > 0) {
    if ((PetscBool)(!pc_gamg->reuse_prol)) {
      /* reset everything */
      ierr = PCReset_MG(pc);CHKERRQ(ierr);
      pc->setupcalled = 0;
    } else {
      PC_MG_Levels **mglevels = mg->levels;
      /* just do Galerkin grids */
      Mat B,dA,dB;

      if (!pc->setupcalled) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"PCSetUp() has not been called yet");
      if (pc_gamg->Nlevels > 1) {
        /* currently only handle case where mat and pmat are the same on coarser levels */
        ierr = KSPGetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,&dA,&dB);CHKERRQ(ierr);
        /* (re)set to get dirty flag */
        ierr = KSPSetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,dA,dB);CHKERRQ(ierr);

        for (level=pc_gamg->Nlevels-2; level>=0; level--) {
          /* the first time through, the matrix structure has changed from repartitioning */
          if (pc_gamg->setup_count==2) {
            ierr = MatPtAP(dB,mglevels[level+1]->interpolate,MAT_INITIAL_MATRIX,1.0,&B);CHKERRQ(ierr);
            ierr = MatDestroy(&mglevels[level]->A);CHKERRQ(ierr);

            mglevels[level]->A = B;
          } else {
            ierr = KSPGetOperators(mglevels[level]->smoothd,NULL,&B);CHKERRQ(ierr);
            ierr = MatPtAP(dB,mglevels[level+1]->interpolate,MAT_REUSE_MATRIX,1.0,&B);CHKERRQ(ierr);
          }
          ierr = KSPSetOperators(mglevels[level]->smoothd,B,B);CHKERRQ(ierr);
          dB   = B;
        }
      }

      ierr = PCSetUp_MG(pc);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    }
  }
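  /*
     Reuse sketch (illustrative only): with -pc_gamg_reuse_interpolation true, a typical
     time-stepping loop that changes matrix values but not the matrix structure only pays
     for the Galerkin products above on each re-setup, roughly:

         ierr = KSPSetOperators(ksp, A, A);CHKERRQ(ierr);   (new values in A)
         ierr = KSPSolve(ksp, b, x);CHKERRQ(ierr);          (re-runs this routine; prolongators are kept)
  */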
  if (!pc_gamg->data) {
    if (pc_gamg->orig_data) {
      ierr = MatGetBlockSize(Pmat, &bs);CHKERRQ(ierr);
      ierr = MatGetLocalSize(Pmat, &qq, NULL);CHKERRQ(ierr);

      pc_gamg->data_sz        = (qq/bs)*pc_gamg->orig_data_cell_rows*pc_gamg->orig_data_cell_cols;
      pc_gamg->data_cell_rows = pc_gamg->orig_data_cell_rows;
      pc_gamg->data_cell_cols = pc_gamg->orig_data_cell_cols;

      ierr = PetscMalloc1(pc_gamg->data_sz, &pc_gamg->data);CHKERRQ(ierr);
      for (qq=0; qq<pc_gamg->data_sz; qq++) pc_gamg->data[qq] = pc_gamg->orig_data[qq];
    } else {
      if (!pc_gamg->ops->createdefaultdata) SETERRQ(comm,PETSC_ERR_PLIB,"'createdefaultdata' not set(?) need to support NULL data");
      ierr = pc_gamg->ops->createdefaultdata(pc,Pmat);CHKERRQ(ierr);
    }
  }

  /* cache original data for reuse */
  if (!pc_gamg->orig_data && (PetscBool)(!pc_gamg->reuse_prol)) {
    ierr = PetscMalloc1(pc_gamg->data_sz, &pc_gamg->orig_data);CHKERRQ(ierr);
    for (qq=0; qq<pc_gamg->data_sz; qq++) pc_gamg->orig_data[qq] = pc_gamg->data[qq];
    pc_gamg->orig_data_cell_rows = pc_gamg->data_cell_rows;
    pc_gamg->orig_data_cell_cols = pc_gamg->data_cell_cols;
  }

  /* get basic dims */
  ierr = MatGetBlockSize(Pmat, &bs);CHKERRQ(ierr);
  ierr = MatGetSize(Pmat, &M, &N);CHKERRQ(ierr);

  ierr   = MatGetInfo(Pmat,MAT_GLOBAL_SUM,&info);CHKERRQ(ierr); /* global reduction */
  nnz0   = info.nz_used;
  nnztot = info.nz_used;
  ierr   = PetscInfo6(pc,"level %d) N=%D, n data rows=%d, n data cols=%d, nnz/row (ave)=%d, np=%d\n",0,M,pc_gamg->data_cell_rows,pc_gamg->data_cell_cols,(int)(nnz0/(PetscReal)M+0.5),size);CHKERRQ(ierr);

  /* Get A_i and R_i */
  for (level=0, Aarr[0]=Pmat, nactivepe = size; level < (pc_gamg->Nlevels-1) && (!level || M>pc_gamg->coarse_eq_limit); level++) {
    pc_gamg->current_level = level;
    level1 = level + 1;
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET1],0,0,0,0);CHKERRQ(ierr);
#if (defined GAMG_STAGES)
    ierr = PetscLogStagePush(gamg_stages[level]);CHKERRQ(ierr);
#endif
#endif
    { /* construct prolongator */
      Mat              Gmat;
      PetscCoarsenData *agg_lists;
      Mat              Prol11;

      ierr = pc_gamg->ops->graph(pc,Aarr[level], &Gmat);CHKERRQ(ierr);
      ierr = pc_gamg->ops->coarsen(pc, &Gmat, &agg_lists);CHKERRQ(ierr);
      ierr = pc_gamg->ops->prolongator(pc,Aarr[level],Gmat,agg_lists,&Prol11);CHKERRQ(ierr);

      /* could have failed to create new level */
      if (Prol11) {
        /* get new block size of coarse matrices */
        ierr = MatGetBlockSizes(Prol11, NULL, &bs);CHKERRQ(ierr);

        if (pc_gamg->ops->optprolongator) {
          /* smooth */
          ierr = pc_gamg->ops->optprolongator(pc, Aarr[level], &Prol11);CHKERRQ(ierr);
        }

        Parr[level1] = Prol11;
      } else Parr[level1] = NULL; /* failed to coarsen */

      if (pc_gamg->use_aggs_in_asm) {
        PetscInt bs;
        ierr = MatGetBlockSizes(Prol11, &bs, NULL);CHKERRQ(ierr);
        ierr = PetscCDGetASMBlocks(agg_lists, bs, Gmat, &nASMBlocksArr[level], &ASMLocalIDsArr[level]);CHKERRQ(ierr);
      }

      ierr = MatDestroy(&Gmat);CHKERRQ(ierr);
      ierr = PetscCDDestroy(agg_lists);CHKERRQ(ierr);
    } /* construct prolongator scope */
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET1],0,0,0,0);CHKERRQ(ierr);
#endif
    if (!level) Aarr[0] = Pmat; /* use Pmat for finest level setup */
    if (!Parr[level1]) { /* failed to coarsen */
      ierr = PetscInfo1(pc,"Stop gridding, level %D\n",level);CHKERRQ(ierr);
#if defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES
      ierr = PetscLogStagePop();CHKERRQ(ierr);
#endif
      break;
    }
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET2],0,0,0,0);CHKERRQ(ierr);
#endif
    ierr = MatGetSize(Parr[level1], &M, &N);CHKERRQ(ierr); /* N is the next M, a loop test variable */
    if (is_last) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Is last ????????");
    if (N <= pc_gamg->coarse_eq_limit) is_last = PETSC_TRUE;
    ierr = pc_gamg->ops->createlevel(pc, Aarr[level], bs, &Parr[level1], &Aarr[level1], &nactivepe, NULL, is_last);CHKERRQ(ierr);
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET2],0,0,0,0);CHKERRQ(ierr);
#endif
    ierr = MatGetSize(Aarr[level1], &M, &N);CHKERRQ(ierr); /* M is the loop test variable */
    ierr = MatGetInfo(Aarr[level1], MAT_GLOBAL_SUM, &info);CHKERRQ(ierr);
    nnztot += info.nz_used;
    ierr = PetscInfo5(pc,"%d) N=%D, n data cols=%d, nnz/row (ave)=%d, %d active pes\n",level1,M,pc_gamg->data_cell_cols,(int)(info.nz_used/(PetscReal)M),nactivepe);CHKERRQ(ierr);

#if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES)
    ierr = PetscLogStagePop();CHKERRQ(ierr);
#endif
    /* stop if one node or one proc -- could pull back for singular problems */
    if ((pc_gamg->data_cell_cols && M/pc_gamg->data_cell_cols < 2) || (!pc_gamg->data_cell_cols && M/bs < 2)) {
      ierr = PetscInfo2(pc,"HARD stop of coarsening on level %D. Grid too small: %D block nodes\n",level,M/bs);CHKERRQ(ierr);
      level++;
      break;
    }
  } /* levels */
  ierr = PetscFree(pc_gamg->data);CHKERRQ(ierr);

  ierr = PetscInfo2(pc,"%D levels, grid complexity = %g\n",level+1,nnztot/nnz0);CHKERRQ(ierr);
  pc_gamg->Nlevels = level + 1;
  fine_level       = level;
  ierr = PCMGSetLevels(pc,pc_gamg->Nlevels,NULL);CHKERRQ(ierr);

  if (pc_gamg->Nlevels > 1) { /* don't setup MG if one level */
    /* set default smoothers & set operators */
    for (lidx = 1, level = pc_gamg->Nlevels-2; lidx <= fine_level; lidx++, level--) {
      KSP smoother;
      PC  subpc;

      ierr = PCMGGetSmoother(pc, lidx, &smoother);CHKERRQ(ierr);
      ierr = KSPGetPC(smoother, &subpc);CHKERRQ(ierr);

      ierr = KSPSetNormType(smoother, KSP_NORM_NONE);CHKERRQ(ierr);
      /* set ops */
      ierr = KSPSetOperators(smoother, Aarr[level], Aarr[level]);CHKERRQ(ierr);
      ierr = PCMGSetInterpolation(pc, lidx, Parr[level+1]);CHKERRQ(ierr);

      /* set defaults */
      ierr = KSPSetType(smoother, KSPCHEBYSHEV);CHKERRQ(ierr);

      /* set blocks for ASM smoother that uses the 'aggregates' */
      if (pc_gamg->use_aggs_in_asm) {
        PetscInt sz;
        IS       *iss;

        sz   = nASMBlocksArr[level];
        iss  = ASMLocalIDsArr[level];
        ierr = PCSetType(subpc, PCASM);CHKERRQ(ierr);
        ierr = PCASMSetOverlap(subpc, 0);CHKERRQ(ierr);
        ierr = PCASMSetType(subpc,PC_ASM_BASIC);CHKERRQ(ierr);
        if (!sz) {
          IS is;
          ierr = ISCreateGeneral(PETSC_COMM_SELF, 0, NULL, PETSC_COPY_VALUES, &is);CHKERRQ(ierr);
          ierr = PCASMSetLocalSubdomains(subpc, 1, NULL, &is);CHKERRQ(ierr);
          ierr = ISDestroy(&is);CHKERRQ(ierr);
        } else {
          PetscInt kk;
          ierr = PCASMSetLocalSubdomains(subpc, sz, NULL, iss);CHKERRQ(ierr);
          for (kk=0; kk<sz; kk++) {
            ierr = ISDestroy(&iss[kk]);CHKERRQ(ierr);
          }
          ierr = PetscFree(iss);CHKERRQ(ierr);
        }
        ASMLocalIDsArr[level] = NULL;
        nASMBlocksArr[level]  = 0;
      } else {
        ierr = PCSetType(subpc, PCSOR);CHKERRQ(ierr);
      }
    }
    {
      /* coarse grid */
      KSP smoother,*k2; PC subpc,pc2; PetscInt ii,first;
      Mat Lmat = Aarr[(level=pc_gamg->Nlevels-1)]; lidx = 0;

      ierr = PCMGGetSmoother(pc, lidx, &smoother);CHKERRQ(ierr);
      ierr = KSPSetOperators(smoother, Lmat, Lmat);CHKERRQ(ierr);
      if (!pc_gamg->use_parallel_coarse_grid_solver) {
        ierr = KSPSetNormType(smoother, KSP_NORM_NONE);CHKERRQ(ierr);
        ierr = KSPGetPC(smoother, &subpc);CHKERRQ(ierr);
        ierr = PCSetType(subpc, PCBJACOBI);CHKERRQ(ierr);
        ierr = PCSetUp(subpc);CHKERRQ(ierr);
        ierr = PCBJacobiGetSubKSP(subpc,&ii,&first,&k2);CHKERRQ(ierr);
        if (ii != 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"ii %D is not one",ii);
        ierr = KSPGetPC(k2[0],&pc2);CHKERRQ(ierr);
        ierr = PCSetType(pc2, PCLU);CHKERRQ(ierr);
        ierr = PCFactorSetShiftType(pc2,MAT_SHIFT_INBLOCKS);CHKERRQ(ierr);
        ierr = KSPSetTolerances(k2[0],PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,1);CHKERRQ(ierr);
        ierr = KSPSetType(k2[0], KSPPREONLY);CHKERRQ(ierr);
        /* This flag gets reset by PCBJacobiGetSubKSP(), but our BJacobi really does the same algorithm everywhere (and in
         * fact, all but one process will have zero dofs), so we reset the flag to avoid having PCView_BJacobi attempt to
         * view every subdomain as though they were different. */
        ((PC_BJacobi*)subpc->data)->same_local_solves = PETSC_TRUE;
      }
    }
    /* should be called in PCSetFromOptions_GAMG(), but cannot be called prior to PCMGSetLevels() */
    ierr = PetscObjectOptionsBegin((PetscObject)pc);CHKERRQ(ierr);
    ierr = PCSetFromOptions_MG(PetscOptionsObject,pc);CHKERRQ(ierr);
    ierr = PetscOptionsEnd();CHKERRQ(ierr);
    ierr = PCMGSetGalerkin(pc,PC_MG_GALERKIN_EXTERNAL);CHKERRQ(ierr);

    /* clean up */
    for (level=1; level<pc_gamg->Nlevels; level++) {
      ierr = MatDestroy(&Parr[level]);CHKERRQ(ierr);
      ierr = MatDestroy(&Aarr[level]);CHKERRQ(ierr);
    }
    ierr = PCSetUp_MG(pc);CHKERRQ(ierr);
  } else {
    KSP smoother;
    ierr = PetscInfo(pc,"One level solver used (system is seen as DD). Using default solver.\n");CHKERRQ(ierr);
    ierr = PCMGGetSmoother(pc, 0, &smoother);CHKERRQ(ierr);
    ierr = KSPSetOperators(smoother, Aarr[0], Aarr[0]);CHKERRQ(ierr);
    ierr = KSPSetType(smoother, KSPPREONLY);CHKERRQ(ierr);
    ierr = PCSetUp_MG(pc);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/* ------------------------------------------------------------------------- */
/*
   PCDestroy_GAMG - Destroys the private context for the GAMG preconditioner
   that was created with PCCreate_GAMG().

   Input Parameter:
.  pc - the preconditioner context

   Application Interface Routine: PCDestroy()
*/
#undef __FUNCT__
#define __FUNCT__ "PCDestroy_GAMG"
PetscErrorCode PCDestroy_GAMG(PC pc)
{
  PetscErrorCode ierr;
  PC_MG          *mg      = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  ierr = PCReset_GAMG(pc);CHKERRQ(ierr);
  if (pc_gamg->ops->destroy) {
    ierr = (*pc_gamg->ops->destroy)(pc);CHKERRQ(ierr);
  }
  ierr = PetscFree(pc_gamg->ops);CHKERRQ(ierr);
  ierr = PetscFree(pc_gamg->gamg_type_name);CHKERRQ(ierr);
  ierr = PetscFree(pc_gamg);CHKERRQ(ierr);
  ierr = PCDestroy_MG(pc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetProcEqLim"
/*@
   PCGAMGSetProcEqLim - Set the number of equations to aim for per process on the coarse grids via processor reduction.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - the number of equations

   Options Database Key:
.  -pc_gamg_process_eq_limit <limit>

   Notes: GAMG will reduce the number of MPI processes used directly on the coarse grids so that there are around <limit> equations on each process
          that has degrees of freedom

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: PCGAMGSetCoarseEqLim()
@*/
PetscErrorCode PCGAMGSetProcEqLim(PC pc, PetscInt n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetProcEqLim_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetProcEqLim_GAMG"
static PetscErrorCode PCGAMGSetProcEqLim_GAMG(PC pc, PetscInt n)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  if (n>0) pc_gamg->min_eq_proc = n;
  PetscFunctionReturn(0);
}
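/* Usage sketch (illustrative, not compiled here; assumes a PC 'pc' of type PCGAMG):

     ierr = PCGAMGSetProcEqLim(pc, 1000);CHKERRQ(ierr);

   or on the command line:  -pc_gamg_process_eq_limit 1000
   With ~16000 coarse-grid equations this would leave roughly 16 processes active. */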
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetCoarseEqLim"
/*@
   PCGAMGSetCoarseEqLim - Set the maximum number of equations on the coarsest grid.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - maximum number of equations to aim for

   Options Database Key:
.  -pc_gamg_coarse_eq_limit <limit>

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: PCGAMGSetProcEqLim()
@*/
PetscErrorCode PCGAMGSetCoarseEqLim(PC pc, PetscInt n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetCoarseEqLim_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetCoarseEqLim_GAMG"
static PetscErrorCode PCGAMGSetCoarseEqLim_GAMG(PC pc, PetscInt n)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  if (n>0) pc_gamg->coarse_eq_limit = n;
  PetscFunctionReturn(0);
}
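/* Usage sketch (illustrative): stop coarsening once the global problem is at most ~500 equations:

     ierr = PCGAMGSetCoarseEqLim(pc, 500);CHKERRQ(ierr);

   or:  -pc_gamg_coarse_eq_limit 500 */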
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetRepartition"
/*@
   PCGAMGSetRepartition - Repartition the degrees of freedom across the processors on the coarser grids

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - PETSC_TRUE or PETSC_FALSE

   Options Database Key:
.  -pc_gamg_repartition <true,false>

   Notes: this will generally improve the load balancing of the work on each level

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetRepartition(PC pc, PetscBool n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetRepartition_C",(PC,PetscBool),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetRepartition_GAMG"
static PetscErrorCode PCGAMGSetRepartition_GAMG(PC pc, PetscBool n)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->repart = n;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetReuseInterpolation"
/*@
   PCGAMGSetReuseInterpolation - Reuse the prolongation when rebuilding the algebraic multigrid preconditioner

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - PETSC_TRUE or PETSC_FALSE

   Options Database Key:
.  -pc_gamg_reuse_interpolation <true,false>

   Level: intermediate

   Notes: this may negatively affect the convergence rate of the method on new matrices if the matrix entries change a great deal, but allows
          the preconditioner to be rebuilt more quickly.

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetReuseInterpolation(PC pc, PetscBool n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetReuseInterpolation_C",(PC,PetscBool),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetReuseInterpolation_GAMG"
static PetscErrorCode PCGAMGSetReuseInterpolation_GAMG(PC pc, PetscBool n)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->reuse_prol = n;
  PetscFunctionReturn(0);
}
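/* Usage sketch (illustrative): enable repartitioning and prolongator reuse together, e.g. for a
   transient run where the mesh is fixed but the matrix values change each step:

     ierr = PCGAMGSetRepartition(pc, PETSC_TRUE);CHKERRQ(ierr);
     ierr = PCGAMGSetReuseInterpolation(pc, PETSC_TRUE);CHKERRQ(ierr);

   or:  -pc_gamg_repartition -pc_gamg_reuse_interpolation */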
#undef __FUNCT__
#define __FUNCT__ "PCGAMGASMSetUseAggs"
/*@
   PCGAMGASMSetUseAggs - Have the PCGAMG smoother on each level use the aggregates defined by the coarsening process as the subdomains for the additive Schwarz preconditioner.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  flg - PETSC_TRUE to use aggregates, PETSC_FALSE to not

   Options Database Key:
.  -pc_gamg_asm_use_agg

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGASMSetUseAggs(PC pc, PetscBool flg)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGASMSetUseAggs_C",(PC,PetscBool),(pc,flg));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGASMSetUseAggs_GAMG"
static PetscErrorCode PCGAMGASMSetUseAggs_GAMG(PC pc, PetscBool flg)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->use_aggs_in_asm = flg;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetUseParallelCoarseGridSolve"
/*@
   PCGAMGSetUseParallelCoarseGridSolve - Allow a parallel coarse grid solver

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  flg - PETSC_TRUE to not force the coarse grid onto one processor

   Options Database Key:
.  -pc_gamg_use_parallel_coarse_grid_solver

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetUseParallelCoarseGridSolve(PC pc, PetscBool flg)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetUseParallelCoarseGridSolve_C",(PC,PetscBool),(pc,flg));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetUseParallelCoarseGridSolve_GAMG"
static PetscErrorCode PCGAMGSetUseParallelCoarseGridSolve_GAMG(PC pc, PetscBool flg)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->use_parallel_coarse_grid_solver = flg;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetNlevels"
/*@
   PCGAMGSetNlevels - Sets the maximum number of levels PCGAMG will use

   Not collective on PC

   Input Parameters:
+  pc - the preconditioner
-  n - the maximum number of levels to use

   Options Database Key:
.  -pc_mg_levels

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetNlevels(PC pc, PetscInt n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetNlevels_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetNlevels_GAMG"
static PetscErrorCode PCGAMGSetNlevels_GAMG(PC pc, PetscInt n)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->Nlevels = n;
  PetscFunctionReturn(0);
}
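/* Usage sketch (illustrative): cap the hierarchy at 4 levels and smooth with ASM built from
   the coarsening aggregates:

     ierr = PCGAMGSetNlevels(pc, 4);CHKERRQ(ierr);
     ierr = PCGAMGASMSetUseAggs(pc, PETSC_TRUE);CHKERRQ(ierr);

   or:  -pc_mg_levels 4 -pc_gamg_asm_use_agg */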
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetThreshold"
/*@
   PCGAMGSetThreshold - Relative threshold to use for dropping edges in the aggregation graph

   Not collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  v - array of threshold values, one per level; 0.0 means keep all nonzero entries in the graph; negative means keep even zero entries in the graph
-  n - number of threshold values provided in v

   Options Database Key:
.  -pc_gamg_threshold <threshold>

   Notes: Before aggregating, GAMG removes small values from the graph, reducing the coupling in the graph and yielding a different
          (perhaps better) coarser set of points.

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetThreshold(PC pc, PetscReal v[], PetscInt n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetThreshold_C",(PC,PetscReal[],PetscInt),(pc,v,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetThreshold_GAMG"
static PetscErrorCode PCGAMGSetThreshold_GAMG(PC pc, PetscReal v[], PetscInt n)
{
  PC_MG    *mg      = (PC_MG*)pc->data;
  PC_GAMG  *pc_gamg = (PC_GAMG*)mg->innerctx;
  PetscInt i;

  PetscFunctionBegin;
  for (i=0;i<n;i++) pc_gamg->threshold[i] = v[i];
  /* fill in any remaining levels by repeatedly applying the scale factor */
  do {pc_gamg->threshold[i] = pc_gamg->threshold[i-1]*pc_gamg->threshold_scale;} while (++i<PETSC_GAMG_MAXLEVELS);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetThresholdScale"
/*@
   PCGAMGSetThresholdScale - Relative threshold reduction at each level

   Not collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  scale - the threshold value reduction, usually < 1.0

   Options Database Key:
.  -pc_gamg_threshold_scale <v>

   Level: advanced

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetThresholdScale(PC pc, PetscReal v)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetThresholdScale_C",(PC,PetscReal),(pc,v));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetThresholdScale_GAMG"
static PetscErrorCode PCGAMGSetThresholdScale_GAMG(PC pc, PetscReal v)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->threshold_scale = v;
  PetscFunctionReturn(0);
}
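/* Usage sketch (illustrative): drop edges below 0.01 on the finest graph and halve the
   threshold on each coarser level, so the levels see 0.01, 0.005, 0.0025, ...
   (set the scale first, since it is used to fill in the unspecified levels):

     PetscReal th[1] = {0.01};
     ierr = PCGAMGSetThresholdScale(pc, 0.5);CHKERRQ(ierr);
     ierr = PCGAMGSetThreshold(pc, th, 1);CHKERRQ(ierr);

   or:  -pc_gamg_threshold 0.01 -pc_gamg_threshold_scale 0.5 */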
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetType"
/*@C
   PCGAMGSetType - Set the solution method

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  type - PCGAMGAGG, PCGAMGGEO, or PCGAMGCLASSICAL

   Options Database Key:
.  -pc_gamg_type <agg,geo,classical> - type of algebraic multigrid to apply

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: PCGAMGGetType(), PCGAMG, PCGAMGType
@*/
PetscErrorCode PCGAMGSetType(PC pc, PCGAMGType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetType_C",(PC,PCGAMGType),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGGetType"
/*@C
   PCGAMGGetType - Get the solution method

   Collective on PC

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  type - the type of algorithm used

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: PCGAMGSetType(), PCGAMGType
@*/
PetscErrorCode PCGAMGGetType(PC pc, PCGAMGType *type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscUseMethod(pc,"PCGAMGGetType_C",(PC,PCGAMGType*),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGGetType_GAMG"
static PetscErrorCode PCGAMGGetType_GAMG(PC pc, PCGAMGType *type)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  *type = pc_gamg->type;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetType_GAMG"
static PetscErrorCode PCGAMGSetType_GAMG(PC pc, PCGAMGType type)
{
  PetscErrorCode ierr,(*r)(PC);
  PC_MG          *mg      = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->type = type;
  ierr = PetscFunctionListFind(GAMGList,type,&r);CHKERRQ(ierr);
  if (!r) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_UNKNOWN_TYPE,"Unknown GAMG type %s given",type);
  if (pc_gamg->ops->destroy) {
    ierr = (*pc_gamg->ops->destroy)(pc);CHKERRQ(ierr);
    ierr = PetscMemzero(pc_gamg->ops,sizeof(struct _PCGAMGOps));CHKERRQ(ierr);
    pc_gamg->ops->createlevel = PCGAMGCreateLevel_GAMG;
    /* cleaning up common data in pc_gamg - this should disappear someday */
    pc_gamg->data_cell_cols      = 0;
    pc_gamg->data_cell_rows      = 0;
    pc_gamg->orig_data_cell_cols = 0;
    pc_gamg->orig_data_cell_rows = 0;
    ierr = PetscFree(pc_gamg->data);CHKERRQ(ierr);
    pc_gamg->data_sz = 0;
  }
  ierr = PetscFree(pc_gamg->gamg_type_name);CHKERRQ(ierr);
  ierr = PetscStrallocpy(type,&pc_gamg->gamg_type_name);CHKERRQ(ierr);
  ierr = (*r)(pc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
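/* Usage sketch (illustrative): select the aggregation-based implementation explicitly:

     ierr = PCSetType(pc, PCGAMG);CHKERRQ(ierr);
     ierr = PCGAMGSetType(pc, PCGAMGAGG);CHKERRQ(ierr);

   or:  -pc_type gamg -pc_gamg_type agg */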
#undef __FUNCT__
#define __FUNCT__ "PCView_GAMG"
static PetscErrorCode PCView_GAMG(PC pc,PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscInt       i;
  PC_MG          *mg      = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  ierr = PetscViewerASCIIPrintf(viewer," GAMG specific options\n");CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer," Threshold for dropping small values in graph on each level =");CHKERRQ(ierr);
  for (i=0;i<pc_gamg->current_level;i++) {
    ierr = PetscViewerASCIIPrintf(viewer," %g",(double)pc_gamg->threshold[i]);CHKERRQ(ierr);
  }
  ierr = PetscViewerASCIIPrintf(viewer,"\n");CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer," Threshold scaling factor for each level not specified = %g\n",(double)pc_gamg->threshold_scale);CHKERRQ(ierr);
  if (pc_gamg->use_aggs_in_asm) {
    ierr = PetscViewerASCIIPrintf(viewer," Using aggregates from coarsening process to define subdomains for PCASM\n");CHKERRQ(ierr);
  }
  if (pc_gamg->use_parallel_coarse_grid_solver) {
    ierr = PetscViewerASCIIPrintf(viewer," Using parallel coarse grid solver (all coarse grid equations not put on one process)\n");CHKERRQ(ierr);
  }
  if (pc_gamg->ops->view) {
    ierr = (*pc_gamg->ops->view)(pc,viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetFromOptions_GAMG"
PetscErrorCode PCSetFromOptions_GAMG(PetscOptionItems *PetscOptionsObject,PC pc)
{
  PetscErrorCode ierr;
  PC_MG          *mg      = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
  PetscBool      flag;
  MPI_Comm       comm;
  char           prefix[256];
  PetscInt       i,n;
  const char     *pcpre;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
  ierr = PetscOptionsHead(PetscOptionsObject,"GAMG options");CHKERRQ(ierr);
  {
    char tname[256];
    ierr = PetscOptionsFList("-pc_gamg_type","Type of AMG method","PCGAMGSetType",GAMGList, pc_gamg->gamg_type_name, tname, sizeof(tname), &flag);CHKERRQ(ierr);
    if (flag) {
      ierr = PCGAMGSetType(pc,tname);CHKERRQ(ierr);
    }
    ierr = PetscOptionsBool("-pc_gamg_repartition","Repartition coarse grids","PCGAMGSetRepartition",pc_gamg->repart,&pc_gamg->repart,NULL);CHKERRQ(ierr);
    ierr = PetscOptionsBool("-pc_gamg_reuse_interpolation","Reuse prolongation operator","PCGAMGSetReuseInterpolation",pc_gamg->reuse_prol,&pc_gamg->reuse_prol,NULL);CHKERRQ(ierr);
    ierr = PetscOptionsBool("-pc_gamg_asm_use_agg","Use aggregation aggregates for ASM smoother","PCGAMGASMSetUseAggs",pc_gamg->use_aggs_in_asm,&pc_gamg->use_aggs_in_asm,NULL);CHKERRQ(ierr);
    ierr = PetscOptionsBool("-pc_gamg_use_parallel_coarse_grid_solver","Use parallel coarse grid solver (otherwise put last grid on one process)","PCGAMGSetUseParallelCoarseGridSolve",pc_gamg->use_parallel_coarse_grid_solver,&pc_gamg->use_parallel_coarse_grid_solver,NULL);CHKERRQ(ierr);
    ierr = PetscOptionsInt("-pc_gamg_process_eq_limit","Limit (goal) on number of equations per process on coarse grids","PCGAMGSetProcEqLim",pc_gamg->min_eq_proc,&pc_gamg->min_eq_proc,NULL);CHKERRQ(ierr);
    ierr = PetscOptionsInt("-pc_gamg_coarse_eq_limit","Limit on number of equations for the coarse grid","PCGAMGSetCoarseEqLim",pc_gamg->coarse_eq_limit,&pc_gamg->coarse_eq_limit,NULL);CHKERRQ(ierr);
    ierr = PetscOptionsReal("-pc_gamg_threshold_scale","Scaling of threshold for each level not specified","PCGAMGSetThresholdScale",pc_gamg->threshold_scale,&pc_gamg->threshold_scale,NULL);CHKERRQ(ierr);
    n    = PETSC_GAMG_MAXLEVELS;
    ierr = PetscOptionsRealArray("-pc_gamg_threshold","Relative threshold to use for dropping edges in aggregation graph","PCGAMGSetThreshold",pc_gamg->threshold,&n,&flag);CHKERRQ(ierr);
    if (!flag || n < PETSC_GAMG_MAXLEVELS) {
      if (!flag) n = 1;
      i = n;
      /* fill in unspecified levels by scaling the last given threshold */
      do {pc_gamg->threshold[i] = pc_gamg->threshold[i-1]*pc_gamg->threshold_scale;} while (++i<PETSC_GAMG_MAXLEVELS);
    }
    ierr = PetscOptionsInt("-pc_mg_levels","Set number of MG levels","PCGAMGSetNlevels",pc_gamg->Nlevels,&pc_gamg->Nlevels,NULL);CHKERRQ(ierr);

    /* set options for subtype */
    if (pc_gamg->ops->setfromoptions) {ierr = (*pc_gamg->ops->setfromoptions)(PetscOptionsObject,pc);CHKERRQ(ierr);}
  }
  ierr = PCGetOptionsPrefix(pc, &pcpre);CHKERRQ(ierr);
  ierr = PetscSNPrintf(prefix,sizeof(prefix),"%spc_gamg_",pcpre ? pcpre : "");CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
/* -------------------------------------------------------------------------- */
/*MC
     PCGAMG - Geometric algebraic multigrid (AMG) preconditioner

   Options Database Keys:
+   -pc_gamg_type <type> - one of agg, geo, or classical
.   -pc_gamg_repartition <true,default=false> - repartition the degrees of freedom across the coarse grids as they are determined
.   -pc_gamg_reuse_interpolation <true,default=false> - when rebuilding the algebraic multigrid preconditioner reuse the previously computed interpolations
.   -pc_gamg_asm_use_agg <true,default=false> - use the aggregates from the coarsening process to define the subdomains on each level for the PCASM smoother
.   -pc_gamg_process_eq_limit <limit, default=50> - GAMG will reduce the number of MPI processes used directly on the coarse grids so that there are around <limit>
                                                    equations on each process that has degrees of freedom
.   -pc_gamg_coarse_eq_limit <limit, default=50> - Set maximum number of equations on the coarsest grid to aim for.
.   -pc_gamg_threshold[] <thresh,default=0> - Before aggregating the graph GAMG will remove small values from the graph on each level
-   -pc_gamg_threshold_scale <scale,default=1> - Scaling of threshold on each coarser grid if not specified

   Options Database Keys for default Aggregation:
+  -pc_gamg_agg_nsmooths <nsmooth, default=1> - number of smoothing steps to use with smoothed aggregation
.  -pc_gamg_sym_graph <true,default=false> - symmetrize the graph before computing the aggregation
-  -pc_gamg_square_graph <n,default=1> - number of levels to square the graph before aggregating it

   Multigrid options (inherited):
+  -pc_mg_cycles <v> - v or w (PCMGSetCycleType())
.  -pc_mg_smoothup <1> - number of post-smoothing steps (PCMGSetNumberSmoothUp())
.  -pc_mg_smoothdown <1> - number of pre-smoothing steps (PCMGSetNumberSmoothDown())
.  -pc_mg_type <multiplicative> - (one of) additive multiplicative full kaskade
-  -pc_mg_levels <levels> - number of levels of multigrid to use

  Notes: In order to obtain good performance for PCGAMG for vector valued problems you must
$       Call MatSetBlockSize() to indicate the number of degrees of freedom per grid point
$       Call MatSetNearNullSpace() (or PCSetCoordinates() if solving the equations of elasticity) to indicate the near null space of the operator
$       See the Users Manual Chapter 4 for more details

  Level: intermediate

  Concepts: algebraic multigrid

.seealso:  PCCreate(), PCSetType(), MatSetBlockSize(), PCMGType, PCSetCoordinates(), MatSetNearNullSpace(), PCGAMGSetType(), PCGAMGAGG, PCGAMGGEO, PCGAMGCLASSICAL, PCGAMGSetProcEqLim(),
           PCGAMGSetCoarseEqLim(), PCGAMGSetRepartition(), PCGAMGRegister(), PCGAMGSetReuseInterpolation(), PCGAMGASMSetUseAggs(), PCGAMGSetUseParallelCoarseGridSolve(), PCGAMGSetNlevels(), PCGAMGSetThreshold(), PCGAMGGetType()
M*/
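/*
   Example (a sketch, not part of this file's build): typical PCGAMG use from a KSP, following
   the Notes above.  'A', 'b', 'x', 'nloc', and 'coords' are placeholders for the assembled
   operator, right-hand side, solution, local node count, and nodal coordinates of a 3D
   elasticity problem; the block size would normally be set when 'A' is created.

     KSP ksp;
     PC  pc;
     ierr = MatSetBlockSize(A,3);CHKERRQ(ierr);                 (3 dof per grid point)
     ierr = KSPCreate(PETSC_COMM_WORLD,&ksp);CHKERRQ(ierr);
     ierr = KSPSetOperators(ksp,A,A);CHKERRQ(ierr);
     ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
     ierr = PCSetType(pc,PCGAMG);CHKERRQ(ierr);
     ierr = PCSetCoordinates(pc,3,nloc,coords);CHKERRQ(ierr);   (builds the near null space for elasticity)
     ierr = KSPSetFromOptions(ksp);CHKERRQ(ierr);
     ierr = KSPSolve(ksp,b,x);CHKERRQ(ierr);
*/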
#undef __FUNCT__
#define __FUNCT__ "PCCreate_GAMG"
PETSC_EXTERN PetscErrorCode PCCreate_GAMG(PC pc)
{
  PetscErrorCode ierr;
  PetscInt       i;
  PC_GAMG        *pc_gamg;
  PC_MG          *mg;

  PetscFunctionBegin;
  /* register AMG type */
  ierr = PCGAMGInitializePackage();CHKERRQ(ierr);

  /* PCGAMG is an inherited class of PCMG. Initialize pc as PCMG */
  ierr = PCSetType(pc, PCMG);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)pc, PCGAMG);CHKERRQ(ierr);

  /* create a supporting struct and attach it to pc */
  ierr = PetscNewLog(pc,&pc_gamg);CHKERRQ(ierr);
  ierr = PCMGSetGalerkin(pc,PC_MG_GALERKIN_EXTERNAL);CHKERRQ(ierr);
  mg   = (PC_MG*)pc->data;
  mg->innerctx = pc_gamg;

  ierr = PetscNewLog(pc,&pc_gamg->ops);CHKERRQ(ierr);

  pc_gamg->setup_count = 0;
  /* these should be in subctx but repartitioning needs simple arrays */
  pc_gamg->data_sz = 0;
  pc_gamg->data    = 0;

  /* overwrite the pointers of PCMG by the functions of base class PCGAMG */
  pc->ops->setfromoptions = PCSetFromOptions_GAMG;
  pc->ops->setup          = PCSetUp_GAMG;
  pc->ops->reset          = PCReset_GAMG;
  pc->ops->destroy        = PCDestroy_GAMG;
  mg->view                = PCView_GAMG;

  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetProcEqLim_C",PCGAMGSetProcEqLim_GAMG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetCoarseEqLim_C",PCGAMGSetCoarseEqLim_GAMG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetRepartition_C",PCGAMGSetRepartition_GAMG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetReuseInterpolation_C",PCGAMGSetReuseInterpolation_GAMG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGASMSetUseAggs_C",PCGAMGASMSetUseAggs_GAMG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetUseParallelCoarseGridSolve_C",PCGAMGSetUseParallelCoarseGridSolve_GAMG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetThreshold_C",PCGAMGSetThreshold_GAMG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetThresholdScale_C",PCGAMGSetThresholdScale_GAMG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetType_C",PCGAMGSetType_GAMG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGGetType_C",PCGAMGGetType_GAMG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetNlevels_C",PCGAMGSetNlevels_GAMG);CHKERRQ(ierr);

  pc_gamg->repart                          = PETSC_FALSE;
  pc_gamg->reuse_prol                      = PETSC_FALSE;
  pc_gamg->use_aggs_in_asm                 = PETSC_FALSE;
  pc_gamg->use_parallel_coarse_grid_solver = PETSC_FALSE;
  pc_gamg->min_eq_proc                     = 50;
  pc_gamg->coarse_eq_limit                 = 50;
  for (i=0;i<PETSC_GAMG_MAXLEVELS;i++) pc_gamg->threshold[i] = 0.;
  pc_gamg->threshold_scale = 1.;
  pc_gamg->Nlevels         = PETSC_GAMG_MAXLEVELS;
  pc_gamg->current_level   = 0; /* don't need to init really */
  pc_gamg->ops->createlevel = PCGAMGCreateLevel_GAMG;

  /* PCSetUp_GAMG assumes that the type has been set, so set it to the default now */
  ierr = PCGAMGSetType(pc,PCGAMGAGG);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGInitializePackage"
/*@C
   PCGAMGInitializePackage - This function initializes everything in the PCGAMG package. It is called
      from PetscDLLibraryRegister() when using dynamic libraries, and on the first call to PCCreate_GAMG()
      when using static libraries.
   Level: developer

.keywords: PC, PCGAMG, initialize, package
.seealso: PetscInitialize()
@*/
PetscErrorCode PCGAMGInitializePackage(void)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (PCGAMGPackageInitialized) PetscFunctionReturn(0);
  PCGAMGPackageInitialized = PETSC_TRUE;
  ierr = PetscFunctionListAdd(&GAMGList,PCGAMGGEO,PCCreateGAMG_GEO);CHKERRQ(ierr);
  ierr = PetscFunctionListAdd(&GAMGList,PCGAMGAGG,PCCreateGAMG_AGG);CHKERRQ(ierr);
  ierr = PetscFunctionListAdd(&GAMGList,PCGAMGCLASSICAL,PCCreateGAMG_Classical);CHKERRQ(ierr);
  ierr = PetscRegisterFinalize(PCGAMGFinalizePackage);CHKERRQ(ierr);

  /* general events */
  ierr = PetscLogEventRegister("PCGAMGGraph_AGG", PC_CLASSID, &PC_GAMGGraph_AGG);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("PCGAMGGraph_GEO", PC_CLASSID, &PC_GAMGGraph_GEO);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("PCGAMGCoarse_AGG", PC_CLASSID, &PC_GAMGCoarsen_AGG);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("PCGAMGCoarse_GEO", PC_CLASSID, &PC_GAMGCoarsen_GEO);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("PCGAMGProl_AGG", PC_CLASSID, &PC_GAMGProlongator_AGG);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("PCGAMGProl_GEO", PC_CLASSID, &PC_GAMGProlongator_GEO);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("PCGAMGPOpt_AGG", PC_CLASSID, &PC_GAMGOptProlongator_AGG);CHKERRQ(ierr);

#if defined PETSC_GAMG_USE_LOG
  ierr = PetscLogEventRegister("GAMG: createProl", PC_CLASSID, &petsc_gamg_setup_events[SET1]);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("  Graph", PC_CLASSID, &petsc_gamg_setup_events[GRAPH]);CHKERRQ(ierr);
  /* PetscLogEventRegister("    G.Mat", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_MAT]); */
  /* PetscLogEventRegister("    G.Filter", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_FILTER]); */
  /* PetscLogEventRegister("    G.Square", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_SQR]); */
  ierr = PetscLogEventRegister("  MIS/Agg", PC_CLASSID, &petsc_gamg_setup_events[SET4]);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("  geo: growSupp", PC_CLASSID, &petsc_gamg_setup_events[SET5]);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("  geo: triangle", PC_CLASSID, &petsc_gamg_setup_events[SET6]);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("    search-set", PC_CLASSID, &petsc_gamg_setup_events[FIND_V]);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("  SA: col data", PC_CLASSID, &petsc_gamg_setup_events[SET7]);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("  SA: frmProl0", PC_CLASSID, &petsc_gamg_setup_events[SET8]);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("  SA: smooth", PC_CLASSID, &petsc_gamg_setup_events[SET9]);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("GAMG: partLevel", PC_CLASSID, &petsc_gamg_setup_events[SET2]);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("  repartition", PC_CLASSID, &petsc_gamg_setup_events[SET12]);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("  Invert-Sort", PC_CLASSID, &petsc_gamg_setup_events[SET13]);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("  Move A", PC_CLASSID, &petsc_gamg_setup_events[SET14]);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("  Move P", PC_CLASSID, &petsc_gamg_setup_events[SET15]);CHKERRQ(ierr);

  /* PetscLogEventRegister(" PL move data", PC_CLASSID, &petsc_gamg_setup_events[SET13]); */
  /* PetscLogEventRegister("GAMG: fix", PC_CLASSID, &petsc_gamg_setup_events[SET10]); */
  /* PetscLogEventRegister("GAMG: set levels", PC_CLASSID, &petsc_gamg_setup_events[SET11]); */
  /* create timer stages */
#if defined GAMG_STAGES
  {
    char     str[32];
    PetscInt lidx;
    sprintf(str,"MG Level %d (finest)",0);
    ierr = PetscLogStageRegister(str, &gamg_stages[0]);CHKERRQ(ierr);
    for (lidx=1; lidx<9; lidx++) {
      sprintf(str,"MG Level %d",(int)lidx);
      ierr = PetscLogStageRegister(str, &gamg_stages[lidx]);CHKERRQ(ierr);
    }
  }
#endif
#endif
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGFinalizePackage"
/*@C
   PCGAMGFinalizePackage - This function frees everything from the PCGAMG package. It is
      called from PetscFinalize() automatically.

   Level: developer

.keywords: Petsc, destroy, package
.seealso: PetscFinalize()
@*/
PetscErrorCode PCGAMGFinalizePackage(void)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PCGAMGPackageInitialized = PETSC_FALSE;
  ierr = PetscFunctionListDestroy(&GAMGList);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGRegister"
/*@C
   PCGAMGRegister - Register a PCGAMG implementation.

   Input Parameters:
+  type - string that will be used as the name of the GAMG type.
-  create - function for creating the gamg context.

   Level: advanced

.seealso: PCGAMGType, PCGAMG, PCGAMGSetType()
@*/
PetscErrorCode PCGAMGRegister(PCGAMGType type, PetscErrorCode (*create)(PC))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PCGAMGInitializePackage();CHKERRQ(ierr);
  ierr = PetscFunctionListAdd(&GAMGList,type,create);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
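/* Registration sketch (illustrative): a plugin can add its own GAMG flavor; the type name
   and creation routine below are hypothetical:

     extern PetscErrorCode PCCreateGAMG_MyAMG(PC pc);    (provided by the plugin)
     ierr = PCGAMGRegister("myamg", PCCreateGAMG_MyAMG);CHKERRQ(ierr);

   after which '-pc_gamg_type myamg' selects it. */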