/*
 GAMG geometric-algebraic multigrid PC - Mark Adams 2011
*/
#include <petsc/private/matimpl.h>
#include <../src/ksp/pc/impls/gamg/gamg.h>           /*I "petscpc.h" I*/
#include <petsc/private/kspimpl.h>
#include <../src/ksp/pc/impls/bjacobi/bjacobi.h> /* Hack to access same_local_solves */

#if defined PETSC_GAMG_USE_LOG
PetscLogEvent petsc_gamg_setup_events[NUM_SET];
#endif

#if defined PETSC_USE_LOG
PetscLogEvent PC_GAMGGraph_AGG;
PetscLogEvent PC_GAMGGraph_GEO;
PetscLogEvent PC_GAMGCoarsen_AGG;
PetscLogEvent PC_GAMGCoarsen_GEO;
PetscLogEvent PC_GAMGProlongator_AGG;
PetscLogEvent PC_GAMGProlongator_GEO;
PetscLogEvent PC_GAMGOptProlongator_AGG;
#endif

#define GAMG_MAXLEVELS 30

/* #define GAMG_STAGES */
#if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES)
static PetscLogStage gamg_stages[GAMG_MAXLEVELS];
#endif

static PetscFunctionList GAMGList = 0;
static PetscBool PCGAMGPackageInitialized;

/* ----------------------------------------------------------------------------- */
#undef __FUNCT__
#define __FUNCT__ "PCReset_GAMG"
PetscErrorCode PCReset_GAMG(PC pc)
{
  PetscErrorCode ierr;
  PC_MG          *mg      = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  if (pc_gamg->data) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_PLIB,"This should not happen, cleaned up in SetUp\n");
  pc_gamg->data_sz = 0;
  ierr = PetscFree(pc_gamg->orig_data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------- */
/*
   PCGAMGCreateLevel_GAMG: create the coarse operator with RAP; repartition and/or
      reduce the number of active processors.

   Input Parameters:
.  pc - parameters + side effects: coarse data in 'pc_gamg->data' and
       'pc_gamg->data_sz' are changed via repartitioning/reduction
.  Amat_fine - matrix on this fine (k) level
.  cr_bs - coarse block size
   In/Output Parameters:
.  a_P_inout - prolongation operator to the next level (k-->k-1)
.  a_nactive_proc - number of active procs
   Output Parameter:
.  a_Amat_crs - coarse matrix that is created (k-1)
*/
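/*
  Illustrative sketch (not part of this translation unit): the coarse operator built
  below is the Galerkin product A_{k-1} = P^T A_k P, computed with MatPtAP(). Assuming
  an assembled fine matrix 'Afine' and a prolongator 'P', the core call is:

     Mat Acoarse;
     ierr = MatPtAP(Afine, P, MAT_INITIAL_MATRIX, 2.0, &Acoarse);CHKERRQ(ierr);

  The fill ratio 2.0 is only a hint for preallocating the product.
*/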
#undef __FUNCT__
#define __FUNCT__ "PCGAMGCreateLevel_GAMG"
static PetscErrorCode PCGAMGCreateLevel_GAMG(PC pc,Mat Amat_fine,PetscInt cr_bs,Mat *a_P_inout,Mat *a_Amat_crs,PetscMPIInt *a_nactive_proc,IS * Pcolumnperm)
{
  PetscErrorCode ierr;
  PC_MG          *mg      = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
  Mat            Cmat,Pold=*a_P_inout;
  MPI_Comm       comm;
  PetscMPIInt    rank,size,new_size,nactive=*a_nactive_proc;
  PetscInt       ncrs_eq,ncrs,f_bs;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)Amat_fine,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MatGetBlockSize(Amat_fine, &f_bs);CHKERRQ(ierr);
  ierr = MatPtAP(Amat_fine, Pold, MAT_INITIAL_MATRIX, 2.0, &Cmat);CHKERRQ(ierr);

  /* set 'ncrs' (nodes), 'ncrs_eq' (equations) */
  ierr = MatGetLocalSize(Cmat, &ncrs_eq, NULL);CHKERRQ(ierr);
  if (pc_gamg->data_cell_rows>0) {
    ncrs = pc_gamg->data_sz/pc_gamg->data_cell_cols/pc_gamg->data_cell_rows;
  } else {
    PetscInt bs;
    ierr = MatGetBlockSize(Cmat, &bs);CHKERRQ(ierr);
    ncrs = ncrs_eq/bs;
  }

  /* get number of PEs to make active 'new_size', reduce, can be any integer 1-P */
  {
    PetscInt ncrs_eq_glob;
    ierr     = MatGetSize(Cmat, &ncrs_eq_glob, NULL);CHKERRQ(ierr);
    new_size = (PetscMPIInt)((float)ncrs_eq_glob/(float)pc_gamg->min_eq_proc + 0.5); /* hardwire min. number of eq/proc */
    if (!new_size) new_size = 1; /* not likely, possible? */
    else if (new_size >= nactive) new_size = nactive; /* no change, rare */
  }
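  /*
    A small worked example of the reduction heuristic above (illustrative numbers):
    with ncrs_eq_glob = 1200 global coarse equations and the default
    min_eq_proc = 50, new_size = round(1200/50) = 24, so at most 24 ranks stay
    active on the coarse level regardless of how many were active before.
  */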
  if (Pcolumnperm) *Pcolumnperm = NULL;

  if (!pc_gamg->repart && new_size==nactive) *a_Amat_crs = Cmat; /* output - no repartitioning or reduction - could bail here */
  else {
    PetscInt *counts,*newproc_idx,ii,jj,kk,strideNew,*tidx,ncrs_new,ncrs_eq_new,nloc_old;
    IS       is_eq_newproc,is_eq_num,is_eq_num_prim,new_eq_indices;

    nloc_old = ncrs_eq/cr_bs;
    if (ncrs_eq % cr_bs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"ncrs_eq %D not divisible by cr_bs %D",ncrs_eq,cr_bs);
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET12],0,0,0,0);CHKERRQ(ierr);
#endif
    /* make 'is_eq_newproc' */
    ierr = PetscMalloc1(size, &counts);CHKERRQ(ierr);
    if (pc_gamg->repart) {
      /* Repartition Cmat_{k} and move columns of P^{k}_{k-1} and coordinates of primal part accordingly */
      Mat adj;

      ierr = PetscInfo3(pc,"Repartition: size (active): %D --> %D, neq = %D\n",*a_nactive_proc,new_size,ncrs_eq);CHKERRQ(ierr);

      /* get 'adj' */
      if (cr_bs == 1) {
        ierr = MatConvert(Cmat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj);CHKERRQ(ierr);
      } else {
        /* make a scalar matrix to partition (no Stokes here) */
        Mat               tMat;
        PetscInt          Istart_crs,Iend_crs,ncols,jj,Ii;
        const PetscScalar *vals;
        const PetscInt    *idx;
        PetscInt          *d_nnz, *o_nnz, M, N;
        static PetscInt   llev = 0;
        MatType           mtype;

        ierr = PetscMalloc2(ncrs, &d_nnz,ncrs, &o_nnz);CHKERRQ(ierr);
        ierr = MatGetOwnershipRange(Cmat, &Istart_crs, &Iend_crs);CHKERRQ(ierr);
        ierr = MatGetSize(Cmat, &M, &N);CHKERRQ(ierr);
        for (Ii = Istart_crs, jj = 0; Ii < Iend_crs; Ii += cr_bs, jj++) {
          ierr      = MatGetRow(Cmat,Ii,&ncols,0,0);CHKERRQ(ierr);
          d_nnz[jj] = ncols/cr_bs;
          o_nnz[jj] = ncols/cr_bs;
          ierr      = MatRestoreRow(Cmat,Ii,&ncols,0,0);CHKERRQ(ierr);
          if (d_nnz[jj] > ncrs) d_nnz[jj] = ncrs;
          if (o_nnz[jj] > (M/cr_bs-ncrs)) o_nnz[jj] = M/cr_bs-ncrs;
        }

        ierr = MatGetType(Amat_fine,&mtype);CHKERRQ(ierr);
        ierr = MatCreate(comm, &tMat);CHKERRQ(ierr);
        ierr = MatSetSizes(tMat, ncrs, ncrs, PETSC_DETERMINE, PETSC_DETERMINE);CHKERRQ(ierr);
        ierr = MatSetType(tMat,mtype);CHKERRQ(ierr);
        ierr = MatSeqAIJSetPreallocation(tMat,0,d_nnz);CHKERRQ(ierr);
        ierr = MatMPIAIJSetPreallocation(tMat,0,d_nnz,0,o_nnz);CHKERRQ(ierr);
        ierr = PetscFree2(d_nnz,o_nnz);CHKERRQ(ierr);

        for (ii = Istart_crs; ii < Iend_crs; ii++) {
          PetscInt dest_row = ii/cr_bs;
          ierr = MatGetRow(Cmat,ii,&ncols,&idx,&vals);CHKERRQ(ierr);
          for (jj = 0; jj < ncols; jj++) {
            PetscInt    dest_col = idx[jj]/cr_bs;
            PetscScalar v        = 1.0;
            ierr = MatSetValues(tMat,1,&dest_row,1,&dest_col,&v,ADD_VALUES);CHKERRQ(ierr);
          }
          ierr = MatRestoreRow(Cmat,ii,&ncols,&idx,&vals);CHKERRQ(ierr);
        }
        ierr = MatAssemblyBegin(tMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
        ierr = MatAssemblyEnd(tMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

        if (llev++ == -1) {
          PetscViewer viewer; char fname[32];
          ierr = PetscSNPrintf(fname,sizeof(fname),"part_mat_%D.mat",llev);CHKERRQ(ierr);
          ierr = PetscViewerBinaryOpen(comm,fname,FILE_MODE_WRITE,&viewer);CHKERRQ(ierr);
          ierr = MatView(tMat, viewer);CHKERRQ(ierr);
          ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
        }
        ierr = MatConvert(tMat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj);CHKERRQ(ierr);
        ierr = MatDestroy(&tMat);CHKERRQ(ierr);
      } /* create 'adj' */
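      /*
        The block below drives a MatPartitioning object. A minimal standalone sketch
        of the same API, assuming an adjacency matrix 'adj' of type MATMPIADJ and a
        target part count 'nparts':

           MatPartitioning part;
           IS              is;
           ierr = MatPartitioningCreate(comm,&part);CHKERRQ(ierr);
           ierr = MatPartitioningSetAdjacency(part,adj);CHKERRQ(ierr);
           ierr = MatPartitioningSetNParts(part,nparts);CHKERRQ(ierr);
           ierr = MatPartitioningApply(part,&is);CHKERRQ(ierr);  (is[i] = new owner of local row i)
           ierr = MatPartitioningDestroy(&part);CHKERRQ(ierr);
      */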
      { /* partition: get newproc_idx */
        char            prefix[256];
        const char      *pcpre;
        const PetscInt  *is_idx;
        MatPartitioning mpart;
        IS              proc_is;
        PetscInt        targetPE;

        ierr = MatPartitioningCreate(comm, &mpart);CHKERRQ(ierr);
        ierr = MatPartitioningSetAdjacency(mpart, adj);CHKERRQ(ierr);
        ierr = PCGetOptionsPrefix(pc, &pcpre);CHKERRQ(ierr);
        ierr = PetscSNPrintf(prefix,sizeof(prefix),"%spc_gamg_",pcpre ? pcpre : "");CHKERRQ(ierr);
        ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
        ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr);
        ierr = MatPartitioningSetNParts(mpart, new_size);CHKERRQ(ierr);
        ierr = MatPartitioningApply(mpart, &proc_is);CHKERRQ(ierr);
        ierr = MatPartitioningDestroy(&mpart);CHKERRQ(ierr);

        /* collect IS info */
        ierr     = PetscMalloc1(ncrs_eq, &newproc_idx);CHKERRQ(ierr);
        ierr     = ISGetIndices(proc_is, &is_idx);CHKERRQ(ierr);
        targetPE = 1; /* bring to "front" of machine */
        /*targetPE = size/new_size;*/ /* spread partitioning across machine */
        for (kk = jj = 0 ; kk < nloc_old ; kk++) {
          for (ii = 0 ; ii < cr_bs ; ii++, jj++) {
            newproc_idx[jj] = is_idx[kk] * targetPE; /* distribution */
          }
        }
        ierr = ISRestoreIndices(proc_is, &is_idx);CHKERRQ(ierr);
        ierr = ISDestroy(&proc_is);CHKERRQ(ierr);
      }
      ierr = MatDestroy(&adj);CHKERRQ(ierr);

      ierr = ISCreateGeneral(comm, ncrs_eq, newproc_idx, PETSC_COPY_VALUES, &is_eq_newproc);CHKERRQ(ierr);
      ierr = PetscFree(newproc_idx);CHKERRQ(ierr);
    } else { /* simple aggregation of parts -- 'is_eq_newproc' */
      PetscInt rfactor,targetPE;

      /* find an integer reduction factor that divides 'size' and brings the active count closest to 'new_size' */
      if (new_size == 1) rfactor = size; /* easy */
      else {
        PetscReal best_fact = 0.;
        jj = -1;
        for (kk = 1 ; kk <= size ; kk++) {
          if (!(size%kk)) { /* a candidate */
            PetscReal nactpe = (PetscReal)size/(PetscReal)kk, fact = nactpe/(PetscReal)new_size;
            if (fact > 1.0) fact = 1./fact; /* keep fact < 1 */
            if (fact > best_fact) {
              best_fact = fact; jj = kk;
            }
          }
        }
        if (jj != -1) rfactor = jj;
        else rfactor = 1; /* can only happen if 'size' is prime */
      }
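      /*
        Worked example of the divisor search above (illustrative numbers): with
        size = 12 ranks and a target new_size = 5, the candidate factors kk that
        divide 12 give active counts 12/kk = 12, 6, 4, 3, 2, 1; the count 6 (kk = 2)
        has the ratio closest to 1 against the target 5, so rfactor = 2 and the
        code settles on new_size = 12/2 = 6 active ranks.
      */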
      new_size = size/rfactor;

      if (new_size==nactive) {
        *a_Amat_crs = Cmat; /* output - no repartitioning or reduction, bail out because nested here */
        ierr        = PetscFree(counts);CHKERRQ(ierr);
        ierr        = PetscInfo2(pc,"Aggregate processors noop: new_size=%D, neq(loc)=%D\n",new_size,ncrs_eq);CHKERRQ(ierr);
#if defined PETSC_GAMG_USE_LOG
        ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET12],0,0,0,0);CHKERRQ(ierr);
#endif
        PetscFunctionReturn(0);
      }

      ierr     = PetscInfo1(pc,"Number of equations (loc) %D with simple aggregation\n",ncrs_eq);CHKERRQ(ierr);
      targetPE = rank/rfactor;
      ierr     = ISCreateStride(comm, ncrs_eq, targetPE, 0, &is_eq_newproc);CHKERRQ(ierr);
    } /* end simple 'is_eq_newproc' */

    /*
      Create an index set from the is_eq_newproc index set to indicate the mapping TO
    */
    ierr = ISPartitioningToNumbering(is_eq_newproc, &is_eq_num);CHKERRQ(ierr);
    is_eq_num_prim = is_eq_num;
    /*
      Determine how many equations/vertices are assigned to each processor
    */
    ierr        = ISPartitioningCount(is_eq_newproc, size, counts);CHKERRQ(ierr);
    ncrs_eq_new = counts[rank];
    ierr        = ISDestroy(&is_eq_newproc);CHKERRQ(ierr);
    ncrs_new    = ncrs_eq_new/cr_bs; /* nodes */

    ierr = PetscFree(counts);CHKERRQ(ierr);
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET12],0,0,0,0);CHKERRQ(ierr);
#endif
    /* data movement scope -- this could be moved to subclasses so that we don't try to cram all auxiliary data into some complex abstracted thing */
    {
      Vec            src_crd, dest_crd;
      const PetscInt *idx,ndata_rows=pc_gamg->data_cell_rows,ndata_cols=pc_gamg->data_cell_cols,node_data_sz=ndata_rows*ndata_cols;
      VecScatter     vecscat;
      PetscScalar    *array;
      IS             isscat;

      /* move data (for primal equations only) */
      /* Create a vector to contain the newly ordered element information */
      ierr = VecCreate(comm, &dest_crd);CHKERRQ(ierr);
      ierr = VecSetSizes(dest_crd, node_data_sz*ncrs_new, PETSC_DECIDE);CHKERRQ(ierr);
      ierr = VecSetType(dest_crd,VECSTANDARD);CHKERRQ(ierr); /* this is needed! */
      /*
        There are 'ndata_rows*ndata_cols' data items per node; one can think of the
        vectors as having a block size of that product. Note, ISs are expanded into
        equation space by 'cr_bs'.
      */
      ierr = PetscMalloc1(ncrs*node_data_sz, &tidx);CHKERRQ(ierr);
      ierr = ISGetIndices(is_eq_num_prim, &idx);CHKERRQ(ierr);
      for (ii=0,jj=0; ii<ncrs; ii++) {
        PetscInt id = idx[ii*cr_bs]/cr_bs; /* get node back */
        for (kk=0; kk<node_data_sz; kk++, jj++) tidx[jj] = id*node_data_sz + kk;
      }
      ierr = ISRestoreIndices(is_eq_num_prim, &idx);CHKERRQ(ierr);
      ierr = ISCreateGeneral(comm, node_data_sz*ncrs, tidx, PETSC_COPY_VALUES, &isscat);CHKERRQ(ierr);
      ierr = PetscFree(tidx);CHKERRQ(ierr);
      /*
        Create a vector to contain the original vertex information for each element
      */
      ierr = VecCreateSeq(PETSC_COMM_SELF, node_data_sz*ncrs, &src_crd);CHKERRQ(ierr);
      for (jj=0; jj<ndata_cols; jj++) {
        const PetscInt stride0=ncrs*pc_gamg->data_cell_rows;
        for (ii=0; ii<ncrs; ii++) {
          for (kk=0; kk<ndata_rows; kk++) {
            PetscInt    ix = ii*ndata_rows + kk + jj*stride0, jx = ii*node_data_sz + kk*ndata_cols + jj;
            PetscScalar tt = (PetscScalar)pc_gamg->data[ix];
            ierr = VecSetValues(src_crd, 1, &jx, &tt, INSERT_VALUES);CHKERRQ(ierr);
          }
        }
      }
      ierr = VecAssemblyBegin(src_crd);CHKERRQ(ierr);
      ierr = VecAssemblyEnd(src_crd);CHKERRQ(ierr);
      /*
        Scatter the element vertex information (still in the original vertex ordering)
        to the correct processor
      */
      ierr = VecScatterCreate(src_crd, NULL, dest_crd, isscat, &vecscat);CHKERRQ(ierr);
      ierr = ISDestroy(&isscat);CHKERRQ(ierr);
      ierr = VecScatterBegin(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
      ierr = VecScatterEnd(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
      ierr = VecScatterDestroy(&vecscat);CHKERRQ(ierr);
      ierr = VecDestroy(&src_crd);CHKERRQ(ierr);
      /*
        Put the element vertex data into a new allocation of the gdata->ele
      */
      ierr = PetscFree(pc_gamg->data);CHKERRQ(ierr);
      ierr = PetscMalloc1(node_data_sz*ncrs_new, &pc_gamg->data);CHKERRQ(ierr);

      pc_gamg->data_sz = node_data_sz*ncrs_new;
      strideNew        = ncrs_new*ndata_rows;

      ierr = VecGetArray(dest_crd, &array);CHKERRQ(ierr);
      for (jj=0; jj<ndata_cols; jj++) {
        for (ii=0; ii<ncrs_new; ii++) {
          for (kk=0; kk<ndata_rows; kk++) {
            PetscInt ix = ii*ndata_rows + kk + jj*strideNew, jx = ii*node_data_sz + kk*ndata_cols + jj;
            pc_gamg->data[ix] = PetscRealPart(array[jx]);
          }
        }
      }
      ierr = VecRestoreArray(dest_crd, &array);CHKERRQ(ierr);
      ierr = VecDestroy(&dest_crd);CHKERRQ(ierr);
    }
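    /*
      The code below renumbers the coarse matrix and the prolongator columns with
      MatGetSubMatrix(). A minimal sketch of the pattern, assuming a parallel matrix
      'A', a numbering IS 'is_num' from ISPartitioningToNumbering(), and the new
      local size 'nlocal_new':

         IS  is_inv;
         Mat Anew;
         ierr = ISInvertPermutation(is_num, nlocal_new, &is_inv);CHKERRQ(ierr);
         ierr = MatGetSubMatrix(A, is_inv, is_inv, MAT_INITIAL_MATRIX, &Anew);CHKERRQ(ierr);

      For example, inverting the permutation [2,0,1] (old index i maps to new index
      is_num[i]) yields [1,2,0] (new index j came from old index is_inv[j]).
    */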
    /* move A and P (columns) with new layout */
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET13],0,0,0,0);CHKERRQ(ierr);
#endif

    /*
      Invert for MatGetSubMatrix
    */
    ierr = ISInvertPermutation(is_eq_num, ncrs_eq_new, &new_eq_indices);CHKERRQ(ierr);
    ierr = ISSort(new_eq_indices);CHKERRQ(ierr); /* is this needed? */
    ierr = ISSetBlockSize(new_eq_indices, cr_bs);CHKERRQ(ierr);
    if (is_eq_num != is_eq_num_prim) {
      ierr = ISDestroy(&is_eq_num_prim);CHKERRQ(ierr); /* could be same as 'is_eq_num' */
    }
    if (Pcolumnperm) {
      ierr = PetscObjectReference((PetscObject)new_eq_indices);CHKERRQ(ierr);
      *Pcolumnperm = new_eq_indices;
    }
    ierr = ISDestroy(&is_eq_num);CHKERRQ(ierr);
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET13],0,0,0,0);CHKERRQ(ierr);
    ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET14],0,0,0,0);CHKERRQ(ierr);
#endif
    /* 'a_Amat_crs' output */
    {
      Mat mat;
      ierr        = MatGetSubMatrix(Cmat, new_eq_indices, new_eq_indices, MAT_INITIAL_MATRIX, &mat);CHKERRQ(ierr);
      *a_Amat_crs = mat;
    }
    ierr = MatDestroy(&Cmat);CHKERRQ(ierr);

#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET14],0,0,0,0);CHKERRQ(ierr);
#endif
    /* prolongator */
    {
      IS       findices;
      PetscInt Istart,Iend;
      Mat      Pnew;

      ierr = MatGetOwnershipRange(Pold, &Istart, &Iend);CHKERRQ(ierr);
#if defined PETSC_GAMG_USE_LOG
      ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET15],0,0,0,0);CHKERRQ(ierr);
#endif
      ierr = ISCreateStride(comm,Iend-Istart,Istart,1,&findices);CHKERRQ(ierr);
      ierr = ISSetBlockSize(findices,f_bs);CHKERRQ(ierr);
      ierr = MatGetSubMatrix(Pold, findices, new_eq_indices, MAT_INITIAL_MATRIX, &Pnew);CHKERRQ(ierr);
      ierr = ISDestroy(&findices);CHKERRQ(ierr);

#if defined PETSC_GAMG_USE_LOG
      ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET15],0,0,0,0);CHKERRQ(ierr);
#endif
      ierr = MatDestroy(a_P_inout);CHKERRQ(ierr);

      /* output - repartitioned */
      *a_P_inout = Pnew;
    }
    ierr = ISDestroy(&new_eq_indices);CHKERRQ(ierr);

    *a_nactive_proc = new_size; /* output */
  }
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------- */
/*
   PCSetUp_GAMG - Prepares for the use of the GAMG preconditioner
                    by setting data structures and options.

   Input Parameter:
.  pc - the preconditioner context

*/
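/*
  Typical use of this setup path from application code (a minimal sketch; 'A' is an
  assembled Mat with its block size and near null space already set, 'b' and 'x'
  are assembled Vecs):

     KSP ksp;
     PC  pc;
     ierr = KSPCreate(PETSC_COMM_WORLD,&ksp);CHKERRQ(ierr);
     ierr = KSPSetOperators(ksp,A,A);CHKERRQ(ierr);
     ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
     ierr = PCSetType(pc,PCGAMG);CHKERRQ(ierr);
     ierr = KSPSetFromOptions(ksp);CHKERRQ(ierr);
     ierr = KSPSolve(ksp,b,x);CHKERRQ(ierr);  (PCSetUp_GAMG runs inside KSPSetUp/KSPSolve)
*/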
#undef __FUNCT__
#define __FUNCT__ "PCSetUp_GAMG"
PetscErrorCode PCSetUp_GAMG(PC pc)
{
  PetscErrorCode ierr;
  PC_MG          *mg      = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
  Mat            Pmat     = pc->pmat;
  PetscInt       fine_level,level,level1,bs,M,qq,lidx,nASMBlocksArr[GAMG_MAXLEVELS];
  MPI_Comm       comm;
  PetscMPIInt    rank,size,nactivepe;
  Mat            Aarr[GAMG_MAXLEVELS],Parr[GAMG_MAXLEVELS];
  IS             *ASMLocalIDsArr[GAMG_MAXLEVELS];
  PetscLogDouble nnz0=0.,nnztot=0.;
  MatInfo        info;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);

  if (pc_gamg->setup_count++ > 0) {
    if ((PetscBool)(!pc_gamg->reuse_prol)) {
      /* reset everything */
      ierr = PCReset_MG(pc);CHKERRQ(ierr);
      pc->setupcalled = 0;
    } else {
      PC_MG_Levels **mglevels = mg->levels;
      /* just do Galerkin grids */
      Mat B,dA,dB;

      if (!pc->setupcalled) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"PCSetUp() has not been called yet");
      if (pc_gamg->Nlevels > 1) {
        /* currently only handle case where mat and pmat are the same on coarser levels */
        ierr = KSPGetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,&dA,&dB);CHKERRQ(ierr);
        /* (re)set to get dirty flag */
        ierr = KSPSetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,dA,dB);CHKERRQ(ierr);

        for (level=pc_gamg->Nlevels-2; level>=0; level--) {
          /* the first time through, the matrix structure has changed from repartitioning */
          if (pc_gamg->setup_count==2) {
            ierr = MatPtAP(dB,mglevels[level+1]->interpolate,MAT_INITIAL_MATRIX,1.0,&B);CHKERRQ(ierr);
            ierr = MatDestroy(&mglevels[level]->A);CHKERRQ(ierr);

            mglevels[level]->A = B;
          } else {
            ierr = KSPGetOperators(mglevels[level]->smoothd,NULL,&B);CHKERRQ(ierr);
            ierr = MatPtAP(dB,mglevels[level+1]->interpolate,MAT_REUSE_MATRIX,1.0,&B);CHKERRQ(ierr);
          }
          ierr = KSPSetOperators(mglevels[level]->smoothd,B,B);CHKERRQ(ierr);
          dB   = B;
        }
      }

      ierr = PCSetUp_MG(pc);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    }
  }
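  /*
    The reuse branch above recomputes each Galerkin product in place. The general
    pattern (a sketch, assuming 'A', 'P', and a previously created product 'C'):

       ierr = MatPtAP(A, P, MAT_INITIAL_MATRIX, 1.0, &C);CHKERRQ(ierr);  (first build)
       ... A changes values but not nonzero structure ...
       ierr = MatPtAP(A, P, MAT_REUSE_MATRIX, 1.0, &C);CHKERRQ(ierr);    (cheap rebuild)
  */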
  if (!pc_gamg->data) {
    if (pc_gamg->orig_data) {
      ierr = MatGetBlockSize(Pmat, &bs);CHKERRQ(ierr);
      ierr = MatGetLocalSize(Pmat, &qq, NULL);CHKERRQ(ierr);

      pc_gamg->data_sz        = (qq/bs)*pc_gamg->orig_data_cell_rows*pc_gamg->orig_data_cell_cols;
      pc_gamg->data_cell_rows = pc_gamg->orig_data_cell_rows;
      pc_gamg->data_cell_cols = pc_gamg->orig_data_cell_cols;

      ierr = PetscMalloc1(pc_gamg->data_sz, &pc_gamg->data);CHKERRQ(ierr);
      for (qq=0; qq<pc_gamg->data_sz; qq++) pc_gamg->data[qq] = pc_gamg->orig_data[qq];
    } else {
      if (!pc_gamg->ops->createdefaultdata) SETERRQ(comm,PETSC_ERR_PLIB,"'createdefaultdata' not set(?) need to support NULL data");
      ierr = pc_gamg->ops->createdefaultdata(pc,Pmat);CHKERRQ(ierr);
    }
  }

  /* cache original data for reuse */
  if (!pc_gamg->orig_data && (PetscBool)(!pc_gamg->reuse_prol)) {
    ierr = PetscMalloc1(pc_gamg->data_sz, &pc_gamg->orig_data);CHKERRQ(ierr);
    for (qq=0; qq<pc_gamg->data_sz; qq++) pc_gamg->orig_data[qq] = pc_gamg->data[qq];
    pc_gamg->orig_data_cell_rows = pc_gamg->data_cell_rows;
    pc_gamg->orig_data_cell_cols = pc_gamg->data_cell_cols;
  }

  /* get basic dims */
  ierr = MatGetBlockSize(Pmat, &bs);CHKERRQ(ierr);
  ierr = MatGetSize(Pmat, &M, &qq);CHKERRQ(ierr);

  ierr   = MatGetInfo(Pmat,MAT_GLOBAL_SUM,&info);CHKERRQ(ierr); /* global reduction */
  nnz0   = info.nz_used;
  nnztot = info.nz_used;
  ierr   = PetscInfo6(pc,"level %d) N=%D, n data rows=%d, n data cols=%d, nnz/row (ave)=%d, np=%d\n",0,M,pc_gamg->data_cell_rows,pc_gamg->data_cell_cols,(int)(nnz0/(PetscReal)M+0.5),size);CHKERRQ(ierr);

  /* Get A_i and R_i */
  for (level=0, Aarr[0]=Pmat, nactivepe = size; level < (pc_gamg->Nlevels-1) && (!level || M>pc_gamg->coarse_eq_limit); level++) {
    pc_gamg->current_level = level;
    level1 = level + 1;
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET1],0,0,0,0);CHKERRQ(ierr);
#if (defined GAMG_STAGES)
    ierr = PetscLogStagePush(gamg_stages[level]);CHKERRQ(ierr);
#endif
#endif
    { /* construct prolongator */
      Mat              Gmat;
      PetscCoarsenData *agg_lists;
      Mat              Prol11;

      ierr = pc_gamg->ops->graph(pc,Aarr[level], &Gmat);CHKERRQ(ierr);
      ierr = pc_gamg->ops->coarsen(pc, &Gmat, &agg_lists);CHKERRQ(ierr);
      ierr = pc_gamg->ops->prolongator(pc,Aarr[level],Gmat,agg_lists,&Prol11);CHKERRQ(ierr);

      /* could have failed to create new level */
      if (Prol11) {
        /* get new block size of coarse matrices */
        ierr = MatGetBlockSizes(Prol11, NULL, &bs);CHKERRQ(ierr);

        if (pc_gamg->ops->optprolongator) {
          /* smooth */
          ierr = pc_gamg->ops->optprolongator(pc, Aarr[level], &Prol11);CHKERRQ(ierr);
        }

        Parr[level1] = Prol11;
      } else Parr[level1] = NULL;

      if (pc_gamg->use_aggs_in_gasm) {
        PetscInt bs;
        ierr = MatGetBlockSizes(Prol11, &bs, NULL);CHKERRQ(ierr);
        ierr = PetscCDGetASMBlocks(agg_lists, bs, &nASMBlocksArr[level], &ASMLocalIDsArr[level]);CHKERRQ(ierr);
      }

      ierr = MatDestroy(&Gmat);CHKERRQ(ierr);
      ierr = PetscCDDestroy(agg_lists);CHKERRQ(ierr);
    } /* construct prolongator scope */
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET1],0,0,0,0);CHKERRQ(ierr);
#endif
    if (!level) Aarr[0] = Pmat; /* use Pmat for finest level setup */
    if (!Parr[level1]) {
      ierr = PetscInfo1(pc,"Stop gridding, level %D\n",level);CHKERRQ(ierr);
#if defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES
      ierr = PetscLogStagePop();CHKERRQ(ierr);
#endif
      break;
    }
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET2],0,0,0,0);CHKERRQ(ierr);
#endif

    ierr = pc_gamg->ops->createlevel(pc, Aarr[level], bs, &Parr[level1], &Aarr[level1], &nactivepe, NULL);CHKERRQ(ierr);

#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET2],0,0,0,0);CHKERRQ(ierr);
#endif
    ierr    = MatGetSize(Aarr[level1], &M, &qq);CHKERRQ(ierr);
    ierr    = MatGetInfo(Aarr[level1], MAT_GLOBAL_SUM, &info);CHKERRQ(ierr);
    nnztot += info.nz_used;
    ierr    = PetscInfo5(pc,"%d) N=%D, n data cols=%d, nnz/row (ave)=%d, %d active pes\n",level1,M,pc_gamg->data_cell_cols,(int)(info.nz_used/(PetscReal)M),nactivepe);CHKERRQ(ierr);

#if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES)
    ierr = PetscLogStagePop();CHKERRQ(ierr);
#endif
    /* stop if one node or one proc -- could pull back for singular problems */
    if ((pc_gamg->data_cell_cols && M/pc_gamg->data_cell_cols < 2) || (!pc_gamg->data_cell_cols && M/bs < 2)) {
      ierr = PetscInfo2(pc,"HARD stop of coarsening on level %D. Grid too small: %D block nodes\n",level,M/bs);CHKERRQ(ierr);
      level++;
      break;
    }
  } /* levels */
  ierr = PetscFree(pc_gamg->data);CHKERRQ(ierr);

  ierr = PetscInfo2(pc,"%D levels, grid complexity = %g\n",level+1,nnztot/nnz0);CHKERRQ(ierr);
  pc_gamg->Nlevels = level + 1;
  fine_level       = level;
  ierr = PCMGSetLevels(pc,pc_gamg->Nlevels,NULL);CHKERRQ(ierr);

  if (pc_gamg->Nlevels > 1) { /* don't set up MG if there is only one level */
    /* set default smoothers & set operators */
    for (lidx = 1, level = pc_gamg->Nlevels-2; lidx <= fine_level; lidx++, level--) {
      KSP smoother;
      PC  subpc;

      ierr = PCMGGetSmoother(pc, lidx, &smoother);CHKERRQ(ierr);
      ierr = KSPGetPC(smoother, &subpc);CHKERRQ(ierr);

      ierr = KSPSetNormType(smoother, KSP_NORM_NONE);CHKERRQ(ierr);
      /* set ops */
      ierr = KSPSetOperators(smoother, Aarr[level], Aarr[level]);CHKERRQ(ierr);
      ierr = PCMGSetInterpolation(pc, lidx, Parr[level+1]);CHKERRQ(ierr);

      /* set defaults */
      ierr = KSPSetType(smoother, KSPCHEBYSHEV);CHKERRQ(ierr);

      /* set blocks for GASM smoother that uses the 'aggregates' */
      if (pc_gamg->use_aggs_in_gasm) {
        PetscInt sz;
        IS       *is;

        sz   = nASMBlocksArr[level];
        is   = ASMLocalIDsArr[level];
        ierr = PCSetType(subpc, PCGASM);CHKERRQ(ierr);
        ierr = PCGASMSetOverlap(subpc, 0);CHKERRQ(ierr);
        if (!sz) {
          IS       is;
          PetscInt my0,kk;
          ierr = MatGetOwnershipRange(Aarr[level], &my0, &kk);CHKERRQ(ierr);
          ierr = ISCreateGeneral(PETSC_COMM_SELF, 1, &my0, PETSC_COPY_VALUES, &is);CHKERRQ(ierr);
          ierr = PCGASMSetSubdomains(subpc, 1, &is, NULL);CHKERRQ(ierr);
          ierr = ISDestroy(&is);CHKERRQ(ierr);
        } else {
          PetscInt kk;
          ierr = PCGASMSetSubdomains(subpc, sz, is, NULL);CHKERRQ(ierr);
          for (kk=0; kk<sz; kk++) {
            ierr = ISDestroy(&is[kk]);CHKERRQ(ierr);
          }
          ierr = PetscFree(is);CHKERRQ(ierr);
        }
        ASMLocalIDsArr[level] = NULL;
        nASMBlocksArr[level]  = 0;
        ierr = PCGASMSetType(subpc, PC_GASM_BASIC);CHKERRQ(ierr);
      } else {
        ierr = PCSetType(subpc, PCSOR);CHKERRQ(ierr);
      }
    }
    {
      /* coarse grid */
      KSP smoother,*k2; PC subpc,pc2; PetscInt ii,first;
      Mat Lmat = Aarr[(level=pc_gamg->Nlevels-1)]; lidx = 0;

      ierr = PCMGGetSmoother(pc, lidx, &smoother);CHKERRQ(ierr);
      ierr = KSPSetOperators(smoother, Lmat, Lmat);CHKERRQ(ierr);
      ierr = KSPSetNormType(smoother, KSP_NORM_NONE);CHKERRQ(ierr);
      ierr = KSPGetPC(smoother, &subpc);CHKERRQ(ierr);
      ierr = PCSetType(subpc, PCBJACOBI);CHKERRQ(ierr);
      ierr = PCSetUp(subpc);CHKERRQ(ierr);
      ierr = PCBJacobiGetSubKSP(subpc,&ii,&first,&k2);CHKERRQ(ierr);
      if (ii != 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"ii %D is not one",ii);
      ierr = KSPGetPC(k2[0],&pc2);CHKERRQ(ierr);
      ierr = PCSetType(pc2, PCLU);CHKERRQ(ierr);
      ierr = PCFactorSetShiftType(pc2,MAT_SHIFT_INBLOCKS);CHKERRQ(ierr);
      ierr = KSPSetTolerances(k2[0],PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,1);CHKERRQ(ierr);
      ierr = KSPSetType(k2[0], KSPPREONLY);CHKERRQ(ierr);
      /* This flag gets reset by PCBJacobiGetSubKSP(), but our BJacobi really does the same algorithm everywhere (and in
       * fact, all but one process will have zero dofs), so we reset the flag to avoid having PCView_BJacobi attempt to
       * view every subdomain as though they were different. */
      ((PC_BJacobi*)subpc->data)->same_local_solves = PETSC_TRUE;
    }
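    /*
      The defaults chosen above can also be reached (or overridden) from the options
      database; a representative set for experimentation:

         -mg_levels_ksp_type chebyshev   -mg_levels_pc_type sor
         -mg_coarse_ksp_type preonly     -mg_coarse_pc_type bjacobi
         -mg_coarse_sub_pc_type lu

      These prefixes come from PCMG; exact option names may vary across PETSc versions.
    */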

    /* should be called in PCSetFromOptions_GAMG(), but cannot be called prior to PCMGSetLevels() */
    ierr = PetscObjectOptionsBegin((PetscObject)pc);CHKERRQ(ierr);
    ierr = PCSetFromOptions_MG(PetscOptionsObject,pc);CHKERRQ(ierr);
    ierr = PetscOptionsEnd();CHKERRQ(ierr);
    if (!mg->galerkin) SETERRQ(comm,PETSC_ERR_USER,"PCGAMG must use Galerkin for coarse operators.");
    if (mg->galerkin == 1) mg->galerkin = 2;

    /* clean up */
    for (level=1; level<pc_gamg->Nlevels; level++) {
      ierr = MatDestroy(&Parr[level]);CHKERRQ(ierr);
      ierr = MatDestroy(&Aarr[level]);CHKERRQ(ierr);
    }
    ierr = PCSetUp_MG(pc);CHKERRQ(ierr);
  } else {
    KSP smoother;
    ierr = PetscInfo(pc,"One level solver used (system is seen as DD). Using default solver.\n");CHKERRQ(ierr);
    ierr = PCMGGetSmoother(pc, 0, &smoother);CHKERRQ(ierr);
    ierr = KSPSetOperators(smoother, Aarr[0], Aarr[0]);CHKERRQ(ierr);
    ierr = KSPSetType(smoother, KSPPREONLY);CHKERRQ(ierr);
    ierr = PCSetUp_MG(pc);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/* ------------------------------------------------------------------------- */
/*
   PCDestroy_GAMG - Destroys the private context for the GAMG preconditioner
   that was created with PCCreate_GAMG().

   Input Parameter:
.  pc - the preconditioner context

   Application Interface Routine: PCDestroy()
*/
#undef __FUNCT__
#define __FUNCT__ "PCDestroy_GAMG"
PetscErrorCode PCDestroy_GAMG(PC pc)
{
  PetscErrorCode ierr;
  PC_MG          *mg      = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  ierr = PCReset_GAMG(pc);CHKERRQ(ierr);
  if (pc_gamg->ops->destroy) {
    ierr = (*pc_gamg->ops->destroy)(pc);CHKERRQ(ierr);
  }
  ierr = PetscRandomDestroy(&pc_gamg->random);CHKERRQ(ierr);
  ierr = PetscFree(pc_gamg->ops);CHKERRQ(ierr);
  ierr = PetscFree(pc_gamg->gamg_type_name);CHKERRQ(ierr);
  ierr = PetscFree(pc_gamg);CHKERRQ(ierr);
  ierr = PCDestroy_MG(pc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetProcEqLim"
/*@
   PCGAMGSetProcEqLim - Set the number of equations to aim for per process on the
      coarse grids via processor reduction.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - the number of equations

   Options Database Key:
.  -pc_gamg_process_eq_limit <limit>

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: PCGAMGSetCoarseEqLim()
@*/
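/*
  Usage sketch for the limit setters below (illustrative values; the same effect is
  available at runtime via -pc_gamg_process_eq_limit and -pc_gamg_coarse_eq_limit):

     ierr = PCGAMGSetProcEqLim(pc,100);CHKERRQ(ierr);    (aim for >= 100 equations per process)
     ierr = PCGAMGSetCoarseEqLim(pc,200);CHKERRQ(ierr);  (stop coarsening below 200 equations)
*/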
PetscErrorCode PCGAMGSetProcEqLim(PC pc, PetscInt n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetProcEqLim_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetProcEqLim_GAMG"
static PetscErrorCode PCGAMGSetProcEqLim_GAMG(PC pc, PetscInt n)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  if (n>0) pc_gamg->min_eq_proc = n;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetCoarseEqLim"
/*@
   PCGAMGSetCoarseEqLim - Set the maximum number of equations on the coarsest grid.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - maximum number of equations to aim for

   Options Database Key:
.  -pc_gamg_coarse_eq_limit <limit>

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: PCGAMGSetProcEqLim()
@*/
PetscErrorCode PCGAMGSetCoarseEqLim(PC pc, PetscInt n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetCoarseEqLim_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetCoarseEqLim_GAMG"
static PetscErrorCode PCGAMGSetCoarseEqLim_GAMG(PC pc, PetscInt n)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  if (n>0) pc_gamg->coarse_eq_limit = n;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetRepartitioning"
/*@
   PCGAMGSetRepartitioning - Repartition the coarse grids for better load balance.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - PETSC_TRUE or PETSC_FALSE

   Options Database Key:
.  -pc_gamg_repartition <true,false>

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: PCGAMGSetProcEqLim()
@*/
PetscErrorCode PCGAMGSetRepartitioning(PC pc, PetscBool n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetRepartitioning_C",(PC,PetscBool),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetRepartitioning_GAMG"
static PetscErrorCode PCGAMGSetRepartitioning_GAMG(PC pc, PetscBool n)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->repart = n;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetReuseInterpolation"
/*@
   PCGAMGSetReuseInterpolation - Reuse the prolongation operators when rebuilding the preconditioner.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - PETSC_TRUE or PETSC_FALSE

   Options Database Key:
.  -pc_gamg_reuse_interpolation <true,false>

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: PCGAMGSetRepartitioning()
@*/
PetscErrorCode PCGAMGSetReuseInterpolation(PC pc, PetscBool n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetReuseInterpolation_C",(PC,PetscBool),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetReuseInterpolation_GAMG"
static PetscErrorCode PCGAMGSetReuseInterpolation_GAMG(PC pc, PetscBool n)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->reuse_prol = n;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetUseASMAggs"
/*@
   PCGAMGSetUseASMAggs - Use the aggregates from the coarsening as the subdomains of a GASM smoother.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - PETSC_TRUE or PETSC_FALSE

   Options Database Key:
.  -pc_gamg_use_agg_gasm

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: PCGASM
@*/
PetscErrorCode PCGAMGSetUseASMAggs(PC pc, PetscBool n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetUseASMAggs_C",(PC,PetscBool),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetUseASMAggs_GAMG"
static PetscErrorCode PCGAMGSetUseASMAggs_GAMG(PC pc, PetscBool n)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->use_aggs_in_gasm = n;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetNlevels"
/*@
   PCGAMGSetNlevels - Sets the maximum number of levels PCGAMG will use

   Not collective on PC

   Input Parameters:
+  pc - the preconditioner
-  n - the maximum number of levels to use

   Options Database Key:
.  -pc_mg_levels

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: PCMGSetLevels()
@*/
PetscErrorCode PCGAMGSetNlevels(PC pc, PetscInt n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetNlevels_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetNlevels_GAMG"
static PetscErrorCode PCGAMGSetNlevels_GAMG(PC pc, PetscInt n)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->Nlevels = n;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetThreshold"
/*@
   PCGAMGSetThreshold - Relative threshold to use for dropping edges in the aggregation graph

   Not collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  threshold - the threshold value; 0.0 means keep all nonzero entries in the graph; negative means keep even zero entries in the graph

   Options Database Key:
.  -pc_gamg_threshold <threshold>

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: PCGAMGSetType()
@*/
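/*
  Usage sketch for the threshold setter below (illustrative value; equivalent to the
  runtime option -pc_gamg_threshold 0.05):

     ierr = PCGAMGSetThreshold(pc,0.05);CHKERRQ(ierr);

  Larger values drop more weak couplings from the strength-of-connection graph,
  which typically slows coarsening (larger coarse grids) but can improve convergence.
*/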
PetscErrorCode PCGAMGSetThreshold(PC pc, PetscReal n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetThreshold_C",(PC,PetscReal),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetThreshold_GAMG"
static PetscErrorCode PCGAMGSetThreshold_GAMG(PC pc, PetscReal n)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->threshold = n;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetType"
/*@
   PCGAMGSetType - Set the solution method

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  type - PCGAMGAGG, PCGAMGGEO, or PCGAMGCLASSICAL

   Options Database Key:
.  -pc_gamg_type <agg,geo,classical>

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: PCGAMGGetType(), PCGAMG
@*/
PetscErrorCode PCGAMGSetType(PC pc, PCGAMGType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetType_C",(PC,PCGAMGType),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGGetType"
/*@
   PCGAMGGetType - Get the solution method

   Collective on PC

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  type - the type of algorithm used

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: PCGAMGSetType(), PCGAMGType
@*/
PetscErrorCode PCGAMGGetType(PC pc, PCGAMGType *type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscUseMethod(pc,"PCGAMGGetType_C",(PC,PCGAMGType*),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGGetType_GAMG"
static PetscErrorCode PCGAMGGetType_GAMG(PC pc, PCGAMGType *type)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  *type = pc_gamg->type;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetType_GAMG"
static PetscErrorCode PCGAMGSetType_GAMG(PC pc, PCGAMGType type)
{
  PetscErrorCode ierr,(*r)(PC);
  PC_MG          *mg      = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->type = type;
  ierr = PetscFunctionListFind(GAMGList,type,&r);CHKERRQ(ierr);
  if (!r) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_UNKNOWN_TYPE,"Unknown GAMG type %s given",type);
  if (pc_gamg->ops->destroy) {
    ierr = (*pc_gamg->ops->destroy)(pc);CHKERRQ(ierr);
    ierr = PetscMemzero(pc_gamg->ops,sizeof(struct _PCGAMGOps));CHKERRQ(ierr);
    pc_gamg->ops->createlevel = PCGAMGCreateLevel_GAMG;
    /* cleaning up common data in pc_gamg - this should disappear someday */
    pc_gamg->data_cell_cols      = 0;
    pc_gamg->data_cell_rows      = 0;
    pc_gamg->orig_data_cell_cols = 0;
    pc_gamg->orig_data_cell_rows = 0;
    ierr = PetscFree(pc_gamg->data);CHKERRQ(ierr);
    pc_gamg->data_sz = 0;
  }
  ierr = PetscFree(pc_gamg->gamg_type_name);CHKERRQ(ierr);
  ierr = PetscStrallocpy(type,&pc_gamg->gamg_type_name);CHKERRQ(ierr);
  ierr = (*r)(pc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCView_GAMG"
static PetscErrorCode PCView_GAMG(PC pc,PetscViewer viewer)
{
  PetscErrorCode ierr;
  PC_MG          *mg      = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  ierr = PetscViewerASCIIPrintf(viewer,"    GAMG specific options\n");CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer,"      Threshold for dropping small values from graph %g\n",(double)pc_gamg->threshold);CHKERRQ(ierr);
  if (pc_gamg->ops->view) {
    ierr = (*pc_gamg->ops->view)(pc,viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetFromOptions_GAMG"
PetscErrorCode PCSetFromOptions_GAMG(PetscOptionItems *PetscOptionsObject,PC pc)
{
  PetscErrorCode ierr;
  PC_MG          *mg      = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
  PetscBool      flag;
  MPI_Comm       comm;
  char           prefix[256];
  const char     *pcpre;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
  ierr = PetscOptionsHead(PetscOptionsObject,"GAMG options");CHKERRQ(ierr);
  {
    char tname[256];
    ierr = PetscOptionsFList("-pc_gamg_type","Type of AMG method","PCGAMGSetType",GAMGList, pc_gamg->gamg_type_name, tname, sizeof(tname), &flag);CHKERRQ(ierr);
    if (flag) {
      ierr = PCGAMGSetType(pc,tname);CHKERRQ(ierr);
    }
    ierr = PetscOptionsBool("-pc_gamg_repartition","Repartition coarse grids","PCGAMGSetRepartitioning",pc_gamg->repart,&pc_gamg->repart,NULL);CHKERRQ(ierr);
    ierr = PetscOptionsBool("-pc_gamg_reuse_interpolation","Reuse prolongation operator","PCGAMGSetReuseInterpolation",pc_gamg->reuse_prol,&pc_gamg->reuse_prol,NULL);CHKERRQ(ierr);
    ierr = PetscOptionsBool("-pc_gamg_use_agg_gasm","Use aggregation aggregates for GASM smoother","PCGAMGSetUseASMAggs",pc_gamg->use_aggs_in_gasm,&pc_gamg->use_aggs_in_gasm,NULL);CHKERRQ(ierr);
    ierr = PetscOptionsInt("-pc_gamg_process_eq_limit","Limit (goal) on number of equations per process on coarse grids","PCGAMGSetProcEqLim",pc_gamg->min_eq_proc,&pc_gamg->min_eq_proc,NULL);CHKERRQ(ierr);
    ierr = PetscOptionsInt("-pc_gamg_coarse_eq_limit","Limit on number of equations for the coarse grid","PCGAMGSetCoarseEqLim",pc_gamg->coarse_eq_limit,&pc_gamg->coarse_eq_limit,NULL);CHKERRQ(ierr);
    ierr = PetscOptionsReal("-pc_gamg_threshold","Relative threshold to use for dropping edges in aggregation graph","PCGAMGSetThreshold",pc_gamg->threshold,&pc_gamg->threshold,&flag);CHKERRQ(ierr);
    ierr = PetscOptionsInt("-pc_mg_levels","Set number of MG levels","PCGAMGSetNlevels",pc_gamg->Nlevels,&pc_gamg->Nlevels,NULL);CHKERRQ(ierr);

    /* set options for subtype */
    if (pc_gamg->ops->setfromoptions) {ierr = (*pc_gamg->ops->setfromoptions)(PetscOptionsObject,pc);CHKERRQ(ierr);}
  }
  ierr = PCGetOptionsPrefix(pc, &pcpre);CHKERRQ(ierr);
  ierr = PetscSNPrintf(prefix,sizeof(prefix),"%spc_gamg_",pcpre ? pcpre : "");CHKERRQ(ierr);
  ierr = PetscObjectSetOptionsPrefix((PetscObject)pc_gamg->random,prefix);CHKERRQ(ierr);
  ierr = PetscRandomSetFromOptions(pc_gamg->random);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------- */
/*MC
     PCGAMG - Geometric algebraic multigrid (AMG) preconditioner

   Options Database Keys:
   Multigrid options (inherited):
+  -pc_mg_cycles <v>: v or w (PCMGSetCycleType())
.  -pc_mg_smoothup <1>: Number of post-smoothing steps (PCMGSetNumberSmoothUp)
.  -pc_mg_smoothdown <1>: Number of pre-smoothing steps (PCMGSetNumberSmoothDown)
-  -pc_mg_type <multiplicative>: (one of) additive multiplicative full kaskade

   Notes: In order to obtain good performance for PCGAMG for vector valued problems you must
$       Call MatSetBlockSize() to indicate the number of degrees of freedom per grid point
$       Call MatSetNearNullSpace() (or PCSetCoordinates() if solving the equations of elasticity) to indicate the near null space of the operator
$       See the Users Manual Chapter 4 for more details

  Level: intermediate

  Concepts: algebraic multigrid

.seealso:  PCCreate(), PCSetType(), MatSetBlockSize(), PCMGType, PCSetCoordinates(), MatSetNearNullSpace(), PCGAMGSetType(), PCGAMGAGG, PCGAMGGEO, PCGAMGCLASSICAL, PCGAMGSetProcEqLim(),
           PCGAMGSetCoarseEqLim(), PCGAMGSetRepartitioning(), PCGAMGRegister(), PCGAMGSetReuseInterpolation(), PCGAMGSetUseASMAggs(), PCGAMGSetNlevels(), PCGAMGSetThreshold(), PCGAMGGetType()
M*/
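/*
  A representative invocation of PCGAMG from the command line (illustrative; './app'
  stands for any program that calls KSPSetFromOptions()):

     mpiexec -n 4 ./app -ksp_type cg -pc_type gamg -pc_gamg_type agg \
             -pc_gamg_threshold 0.02 -pc_mg_levels 4 -ksp_monitor

  From code, the equivalent of '-pc_type gamg -pc_gamg_type agg' is:

     ierr = PCSetType(pc,PCGAMG);CHKERRQ(ierr);
     ierr = PCGAMGSetType(pc,PCGAMGAGG);CHKERRQ(ierr);
*/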
#undef __FUNCT__
#define __FUNCT__ "PCCreate_GAMG"
PETSC_EXTERN PetscErrorCode PCCreate_GAMG(PC pc)
{
  PetscErrorCode ierr;
  PC_GAMG        *pc_gamg;
  PC_MG          *mg;

  PetscFunctionBegin;
  /* register AMG type */
  ierr = PCGAMGInitializePackage();CHKERRQ(ierr);

  /* PCGAMG is an inherited class of PCMG. Initialize pc as PCMG */
  ierr = PCSetType(pc, PCMG);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)pc, PCGAMG);CHKERRQ(ierr);

  /* create a supporting struct and attach it to pc */
  ierr         = PetscNewLog(pc,&pc_gamg);CHKERRQ(ierr);
  mg           = (PC_MG*)pc->data;
  mg->galerkin = 2;  /* Use Galerkin, but it is computed externally from PCMG by GAMG code */
  mg->innerctx = pc_gamg;

  ierr = PetscNewLog(pc,&pc_gamg->ops);CHKERRQ(ierr);

  pc_gamg->setup_count = 0;
  /* these should be in subctx but repartitioning needs simple arrays */
  pc_gamg->data_sz = 0;
  pc_gamg->data    = 0;

  /* overwrite the pointers of PCMG by the functions of base class PCGAMG */
  pc->ops->setfromoptions = PCSetFromOptions_GAMG;
  pc->ops->setup          = PCSetUp_GAMG;
  pc->ops->reset          = PCReset_GAMG;
  pc->ops->destroy        = PCDestroy_GAMG;
  mg->view                = PCView_GAMG;

  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetProcEqLim_C",PCGAMGSetProcEqLim_GAMG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetCoarseEqLim_C",PCGAMGSetCoarseEqLim_GAMG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetRepartitioning_C",PCGAMGSetRepartitioning_GAMG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetReuseInterpolation_C",PCGAMGSetReuseInterpolation_GAMG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetUseASMAggs_C",PCGAMGSetUseASMAggs_GAMG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetThreshold_C",PCGAMGSetThreshold_GAMG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetType_C",PCGAMGSetType_GAMG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGGetType_C",PCGAMGGetType_GAMG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetNlevels_C",PCGAMGSetNlevels_GAMG);CHKERRQ(ierr);
  pc_gamg->repart           = PETSC_FALSE;
  pc_gamg->reuse_prol       = PETSC_FALSE;
  pc_gamg->use_aggs_in_gasm = PETSC_FALSE;
  pc_gamg->min_eq_proc      = 50;
  pc_gamg->coarse_eq_limit  = 50;
  pc_gamg->threshold        = 0.;
  pc_gamg->Nlevels          = GAMG_MAXLEVELS;
  pc_gamg->current_level    = 0; /* don't need to init really */
  pc_gamg->ops->createlevel = PCGAMGCreateLevel_GAMG;

  ierr = PetscRandomCreate(PetscObjectComm((PetscObject)pc),&pc_gamg->random);CHKERRQ(ierr);

  /* PCSetUp_GAMG assumes that the type has been set, so set it to the default now */
  ierr = PCGAMGSetType(pc,PCGAMGAGG);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGInitializePackage"
/*@C
   PCGAMGInitializePackage - This function initializes everything in the PCGAMG package. It is called
      from PetscDLLibraryRegister() when using dynamic libraries, and on the first call to PCCreate_GAMG()
      when using static libraries.

   Level: developer

.keywords: PC, PCGAMG, initialize, package
.seealso: PetscInitialize()
@*/
PetscErrorCode PCGAMGInitializePackage(void)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (PCGAMGPackageInitialized) PetscFunctionReturn(0);
  PCGAMGPackageInitialized = PETSC_TRUE;
  ierr = PetscFunctionListAdd(&GAMGList,PCGAMGGEO,PCCreateGAMG_GEO);CHKERRQ(ierr);
  ierr = PetscFunctionListAdd(&GAMGList,PCGAMGAGG,PCCreateGAMG_AGG);CHKERRQ(ierr);
  ierr = PetscFunctionListAdd(&GAMGList,PCGAMGCLASSICAL,PCCreateGAMG_Classical);CHKERRQ(ierr);
  ierr = PetscRegisterFinalize(PCGAMGFinalizePackage);CHKERRQ(ierr);

  /* general events */
  ierr = PetscLogEventRegister("PCGAMGGraph_AGG", PC_CLASSID, &PC_GAMGGraph_AGG);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("PCGAMGGraph_GEO", PC_CLASSID, &PC_GAMGGraph_GEO);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("PCGAMGCoarse_AGG", PC_CLASSID, &PC_GAMGCoarsen_AGG);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("PCGAMGCoarse_GEO", PC_CLASSID, &PC_GAMGCoarsen_GEO);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("PCGAMGProl_AGG", PC_CLASSID, &PC_GAMGProlongator_AGG);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("PCGAMGProl_GEO", PC_CLASSID, &PC_GAMGProlongator_GEO);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("PCGAMGPOpt_AGG", PC_CLASSID, &PC_GAMGOptProlongator_AGG);CHKERRQ(ierr);

#if defined PETSC_GAMG_USE_LOG
  ierr = PetscLogEventRegister("GAMG: createProl", PC_CLASSID, &petsc_gamg_setup_events[SET1]);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("  Graph", PC_CLASSID, &petsc_gamg_setup_events[GRAPH]);CHKERRQ(ierr);
  /* PetscLogEventRegister("    G.Mat", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_MAT]); */
  /* PetscLogEventRegister("    G.Filter", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_FILTER]); */
  /* PetscLogEventRegister("    G.Square", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_SQR]); */
  ierr = PetscLogEventRegister("  MIS/Agg", PC_CLASSID, &petsc_gamg_setup_events[SET4]);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("  geo: growSupp", PC_CLASSID, &petsc_gamg_setup_events[SET5]);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("  geo: triangle", PC_CLASSID, &petsc_gamg_setup_events[SET6]);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("    search-set", PC_CLASSID, &petsc_gamg_setup_events[FIND_V]);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("  SA: col data", PC_CLASSID, &petsc_gamg_setup_events[SET7]);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("  SA: frmProl0", PC_CLASSID, &petsc_gamg_setup_events[SET8]);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("  SA: smooth", PC_CLASSID, &petsc_gamg_setup_events[SET9]);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("GAMG: partLevel", PC_CLASSID, &petsc_gamg_setup_events[SET2]);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("  repartition", PC_CLASSID, &petsc_gamg_setup_events[SET12]);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("  Invert-Sort", PC_CLASSID, &petsc_gamg_setup_events[SET13]);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("  Move A", PC_CLASSID, &petsc_gamg_setup_events[SET14]);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("  Move P", PC_CLASSID, &petsc_gamg_setup_events[SET15]);CHKERRQ(ierr);

  /* PetscLogEventRegister(" PL move data", PC_CLASSID, &petsc_gamg_setup_events[SET13]); */
  /* PetscLogEventRegister("GAMG: fix", PC_CLASSID, &petsc_gamg_setup_events[SET10]); */
  /* PetscLogEventRegister("GAMG: set levels", PC_CLASSID, &petsc_gamg_setup_events[SET11]); */
  /* create timer stages */
#if defined GAMG_STAGES
  {
    char     str[32];
    PetscInt lidx;
    sprintf(str,"MG Level %d (finest)",0);
    ierr = PetscLogStageRegister(str, &gamg_stages[0]);CHKERRQ(ierr);
    for (lidx=1; lidx<9; lidx++) {
      sprintf(str,"MG Level %d",(int)lidx);
      ierr = PetscLogStageRegister(str, &gamg_stages[lidx]);CHKERRQ(ierr);
    }
  }
#endif
#endif
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGFinalizePackage"
/*@C
   PCGAMGFinalizePackage - This function frees everything from the PCGAMG package. It is
      called from PetscFinalize() automatically.

   Level: developer

.keywords: Petsc, destroy, package
.seealso: PetscFinalize()
@*/
PetscErrorCode PCGAMGFinalizePackage(void)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PCGAMGPackageInitialized = PETSC_FALSE;
  ierr = PetscFunctionListDestroy(&GAMGList);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGRegister"
/*@C
   PCGAMGRegister - Register a PCGAMG implementation.

   Input Parameters:
+  type - string that will be used as the name of the GAMG type.
-  create - function for creating the gamg context.

   Level: advanced

.seealso: PCGAMGType, PCGAMG, PCGAMGSetType()
@*/
PetscErrorCode PCGAMGRegister(PCGAMGType type, PetscErrorCode (*create)(PC))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PCGAMGInitializePackage();CHKERRQ(ierr);
  ierr = PetscFunctionListAdd(&GAMGList,type,create);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
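
/*
  Registration sketch for a user-defined GAMG flavor (the names 'PCCreateGAMG_MyAgg'
  and "myagg" are hypothetical, shown for illustration only):

     static PetscErrorCode PCCreateGAMG_MyAgg(PC pc) { ... fill in pc_gamg->ops ... }

     ierr = PCGAMGRegister("myagg", PCCreateGAMG_MyAgg);CHKERRQ(ierr);
     ierr = PCGAMGSetType(pc, "myagg");CHKERRQ(ierr);   (or -pc_gamg_type myagg)
*/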