/*
 GAMG geometric-algebraic multigrid PC - Mark Adams 2011
 */
#include <petsc-private/matimpl.h>
#include <../src/ksp/pc/impls/gamg/gamg.h> /*I "petscpc.h" I*/
#include <petsc-private/kspimpl.h>
#include <../src/ksp/pc/impls/bjacobi/bjacobi.h> /* Hack to access same_local_solves */

/* Optional fine-grained setup-phase event logging (SET1, SET2, SET12-SET15, ...) */
#if defined PETSC_GAMG_USE_LOG
PetscLogEvent petsc_gamg_setup_events[NUM_SET];
#endif

#if defined PETSC_USE_LOG
PetscLogEvent PC_GAMGGgraph_AGG;
PetscLogEvent PC_GAMGGgraph_GEO;
PetscLogEvent PC_GAMGCoarsen_AGG;
PetscLogEvent PC_GAMGCoarsen_GEO;
PetscLogEvent PC_GAMGProlongator_AGG;
PetscLogEvent PC_GAMGProlongator_GEO;
PetscLogEvent PC_GAMGOptprol_AGG;
#endif

/* Hard upper bound on the number of multigrid levels GAMG will build */
#define GAMG_MAXLEVELS 30

/* #define GAMG_STAGES */
#if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES)
static PetscLogStage gamg_stages[GAMG_MAXLEVELS];
#endif

/* Registry of GAMG subtypes (e.g. "agg", "geo") and one-shot package-init flag */
static PetscFunctionList GAMGList = 0;
static PetscBool PCGAMGPackageInitialized;

/* ----------------------------------------------------------------------------- */
#undef __FUNCT__
#define __FUNCT__ "PCReset_GAMG"
/*
   PCReset_GAMG - releases the coarsening helper data ('data', its size, and the
   cached original copy 'orig_data') held in the PC_GAMG context.

   Input Parameter:
.  pc - the preconditioner context (a PC_MG whose innerctx is the PC_GAMG)

   Notes:
   'pc_gamg->data' is expected to have been freed already at the end of SetUp;
   finding it non-NULL here is reported as an anomaly but still cleaned up.
*/
PetscErrorCode PCReset_GAMG(PC pc)
{
  PetscErrorCode ierr;
  PC_MG          *mg      = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  if (pc_gamg->data) { /* this should not happen, cleaned up in SetUp */
    PetscPrintf(PetscObjectComm((PetscObject)pc),"***[%d]%s this should not happen, cleaned up in SetUp\n",0,__FUNCT__);
    ierr = PetscFree(pc_gamg->data);CHKERRQ(ierr);
  }
  /* explicit reset in addition to PetscFree() for clarity */
  pc_gamg->data = NULL; pc_gamg->data_sz = 0;

  if (pc_gamg->orig_data) {
    ierr = PetscFree(pc_gamg->orig_data);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------- */
/*
   PCGAMGCreateLevel_GAMG: create coarse op with RAP.  repartition and/or reduce number
     of active processors.

   Input Parameter:
   .
pc - parameters + side effect: coarse data in 'pc_gamg->data' and
      'pc_gamg->data_sz' are changed via repartitioning/reduction.
   . Amat_fine - matrix on this fine (k) level
   . cr_bs - coarse block size
   In/Output Parameter:
   . a_P_inout - prolongation operator to the next level (k-->k-1)
   . a_nactive_proc - number of active procs
   Output Parameter:
   . a_Amat_crs - coarse matrix that is created (k-1)
*/

#undef __FUNCT__
#define __FUNCT__ "PCGAMGCreateLevel_GAMG"
static PetscErrorCode PCGAMGCreateLevel_GAMG(PC pc,Mat Amat_fine,PetscInt cr_bs,
                                             Mat *a_P_inout,Mat *a_Amat_crs,PetscMPIInt *a_nactive_proc,
                                             IS * Pcolumnperm)
{
  PetscErrorCode ierr;
  PC_MG          *mg      = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
  const PetscBool repart  = pc_gamg->repart;
  const PetscInt  min_eq_proc = pc_gamg->min_eq_proc, coarse_max = pc_gamg->coarse_eq_limit;
  Mat             Cmat,Pold=*a_P_inout;
  MPI_Comm        comm;
  PetscMPIInt     rank,size,new_size,nactive=*a_nactive_proc;
  PetscInt        ncrs_eq,ncrs,f_bs;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)Amat_fine,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MatGetBlockSize(Amat_fine, &f_bs);CHKERRQ(ierr);
  /* RAP: form the Galerkin coarse operator C = P^T A P (2.0 is the fill estimate) */
  ierr = MatPtAP(Amat_fine, Pold, MAT_INITIAL_MATRIX, 2.0, &Cmat);CHKERRQ(ierr);

  /* set 'ncrs' (local coarse nodes) and 'ncrs_eq' (local coarse equations) */
  ierr = MatGetLocalSize(Cmat, &ncrs_eq, NULL);CHKERRQ(ierr);
  if (pc_gamg->data_cell_rows>0) {
    /* derive node count from the auxiliary data layout (rows x cols items per node) */
    ncrs = pc_gamg->data_sz/pc_gamg->data_cell_cols/pc_gamg->data_cell_rows;
  } else {
    /* no auxiliary data: nodes = equations / block size */
    PetscInt bs;
    ierr = MatGetBlockSize(Cmat, &bs);CHKERRQ(ierr);
    ncrs = ncrs_eq/bs;
  }

  /* get number of PEs to make active 'new_size', reduce, can be any integer 1-P */
  {
    PetscInt ncrs_eq_glob;
    ierr = MatGetSize(Cmat, &ncrs_eq_glob, NULL);CHKERRQ(ierr);
    new_size = (PetscMPIInt)((float)ncrs_eq_glob/(float)min_eq_proc + 0.5); /* hardwire min. number of eq/proc */
    if (new_size == 0 || ncrs_eq_glob < coarse_max) new_size = 1; /* tiny problem: one active proc */
    else if (new_size >= nactive) new_size = nactive; /* no change, rare */
  }

  if (Pcolumnperm) *Pcolumnperm = NULL;

  if (!repart && new_size==nactive) *a_Amat_crs = Cmat; /* output - no repartitioning or reduction - could bail here */
  else {
    /* Repartition and/or reduce active processors: renumber equations, move the
       auxiliary node data, and permute columns of P and rows/cols of Cmat. */
    PetscInt *counts,*newproc_idx,ii,jj,kk,strideNew,*tidx,ncrs_new,ncrs_eq_new,nloc_old;
    IS       is_eq_newproc,is_eq_num,is_eq_num_prim,new_eq_indices;

    nloc_old = ncrs_eq/cr_bs;
    if (ncrs_eq % cr_bs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"ncrs_eq %D not divisible by cr_bs %D",ncrs_eq,cr_bs);
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET12],0,0,0,0);CHKERRQ(ierr);
#endif
    /* make 'is_eq_newproc': target processor for each local equation */
    ierr = PetscMalloc1(size, &counts);CHKERRQ(ierr);
    if (repart) {
      /* Repartition Cmat_{k} and move columns of P^{k}_{k-1} and coordinates of primal part accordingly */
      Mat adj;

      if (pc_gamg->verbose>0) {
        if (pc_gamg->verbose==1) PetscPrintf(comm,"\t[%d]%s repartition: size (active): %d --> %d, neq = %d\n",rank,__FUNCT__,*a_nactive_proc,new_size,ncrs_eq);
        else {
          PetscInt n;
          ierr = MPI_Allreduce(&ncrs_eq, &n, 1, MPIU_INT, MPI_SUM, comm);CHKERRQ(ierr);
          PetscPrintf(comm,"\t[%d]%s repartition: size (active): %d --> %d, neq = %d\n",rank,__FUNCT__,*a_nactive_proc,new_size,n);
        }
      }

      /* get 'adj': adjacency graph of the nodal (block-collapsed) coarse matrix */
      if (cr_bs == 1) {
        ierr = MatConvert(Cmat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj);CHKERRQ(ierr);
      } else {
        /* make a scalar matrix to partition (no Stokes here): collapse each
           cr_bs x cr_bs block of Cmat to a single (summed) entry */
        Mat               tMat;
        PetscInt          Istart_crs,Iend_crs,ncols,jj,Ii;
        const PetscScalar *vals;
        const PetscInt    *idx;
        PetscInt          *d_nnz, *o_nnz, M, N;
        static PetscInt   llev = 0; /* persists across calls; used only for the debug dump below */

        ierr = PetscMalloc1(ncrs, &d_nnz);CHKERRQ(ierr);
        ierr = PetscMalloc1(ncrs, &o_nnz);CHKERRQ(ierr);
        ierr = MatGetOwnershipRange(Cmat, &Istart_crs, &Iend_crs);CHKERRQ(ierr);
        ierr = MatGetSize(Cmat, &M, &N);CHKERRQ(ierr);
        /* over-estimate nnz per nodal row from the first equation of each block */
        for (Ii = Istart_crs, jj = 0; Ii < Iend_crs; Ii += cr_bs, jj++) {
          ierr = MatGetRow(Cmat,Ii,&ncols,0,0);CHKERRQ(ierr);
          d_nnz[jj] = ncols/cr_bs;
          o_nnz[jj] = ncols/cr_bs;
          ierr = MatRestoreRow(Cmat,Ii,&ncols,0,0);CHKERRQ(ierr);
          /* clamp to the diagonal/off-diagonal block widths */
          if (d_nnz[jj] > ncrs) d_nnz[jj] = ncrs;
          if (o_nnz[jj] > (M/cr_bs-ncrs)) o_nnz[jj] = M/cr_bs-ncrs;
        }

        ierr = MatCreate(comm, &tMat);CHKERRQ(ierr);
        ierr = MatSetSizes(tMat, ncrs, ncrs,PETSC_DETERMINE, PETSC_DETERMINE);CHKERRQ(ierr);
        ierr = MatSetType(tMat,MATAIJ);CHKERRQ(ierr);
        ierr = MatSeqAIJSetPreallocation(tMat,0,d_nnz);CHKERRQ(ierr);
        ierr = MatMPIAIJSetPreallocation(tMat,0,d_nnz,0,o_nnz);CHKERRQ(ierr);
        ierr = PetscFree(d_nnz);CHKERRQ(ierr);
        ierr = PetscFree(o_nnz);CHKERRQ(ierr);

        /* accumulate 1.0 for every (block-row, block-col) connection */
        for (ii = Istart_crs; ii < Iend_crs; ii++) {
          PetscInt dest_row = ii/cr_bs;
          ierr = MatGetRow(Cmat,ii,&ncols,&idx,&vals);CHKERRQ(ierr);
          for (jj = 0; jj < ncols; jj++) {
            PetscInt    dest_col = idx[jj]/cr_bs;
            PetscScalar v        = 1.0;
            ierr = MatSetValues(tMat,1,&dest_row,1,&dest_col,&v,ADD_VALUES);CHKERRQ(ierr);
          }
          ierr = MatRestoreRow(Cmat,ii,&ncols,&idx,&vals);CHKERRQ(ierr);
        }
        ierr = MatAssemblyBegin(tMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
        ierr = MatAssemblyEnd(tMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

        if (llev++ == -1) { /* dead debug code: enable by changing the condition */
          PetscViewer viewer; char fname[32];
          ierr = PetscSNPrintf(fname,sizeof(fname),"part_mat_%D.mat",llev);CHKERRQ(ierr);
          PetscViewerBinaryOpen(comm,fname,FILE_MODE_WRITE,&viewer);
          ierr = MatView(tMat, viewer);CHKERRQ(ierr);
          ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
        }

        ierr = MatConvert(tMat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj);CHKERRQ(ierr);

        ierr = MatDestroy(&tMat);CHKERRQ(ierr);
      } /* create 'adj' */

      { /* partition: get newproc_idx */
        char            prefix[256];
        const char      *pcpre;
        const PetscInt  *is_idx;
        MatPartitioning mpart;
        IS              proc_is;
        PetscInt        targetPE;

        ierr = MatPartitioningCreate(comm, &mpart);CHKERRQ(ierr);
        ierr = MatPartitioningSetAdjacency(mpart, adj);CHKERRQ(ierr);
        ierr = PCGetOptionsPrefix(pc, &pcpre);CHKERRQ(ierr);
        ierr = PetscSNPrintf(prefix,sizeof(prefix),"%spc_gamg_",pcpre ? pcpre : "");CHKERRQ(ierr);
        ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
        ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr);
        ierr = MatPartitioningSetNParts(mpart, new_size);CHKERRQ(ierr);
        ierr = MatPartitioningApply(mpart, &proc_is);CHKERRQ(ierr);
        ierr = MatPartitioningDestroy(&mpart);CHKERRQ(ierr);

        /* collect IS info: expand the per-node partition to per-equation */
        ierr = PetscMalloc1(ncrs_eq, &newproc_idx);CHKERRQ(ierr);
        ierr = ISGetIndices(proc_is, &is_idx);CHKERRQ(ierr);
        targetPE = 1; /* bring to "front" of machine */
        /*targetPE = size/new_size;*/ /* spread partitioning across machine */
        for (kk = jj = 0 ; kk < nloc_old ; kk++) {
          for (ii = 0 ; ii < cr_bs ; ii++, jj++) {
            newproc_idx[jj] = is_idx[kk] * targetPE; /* distribution */
          }
        }
        ierr = ISRestoreIndices(proc_is, &is_idx);CHKERRQ(ierr);
        ierr = ISDestroy(&proc_is);CHKERRQ(ierr);
      }
      ierr = MatDestroy(&adj);CHKERRQ(ierr);

      ierr = ISCreateGeneral(comm, ncrs_eq, newproc_idx, PETSC_COPY_VALUES, &is_eq_newproc);CHKERRQ(ierr);
      if (newproc_idx != 0) {
        ierr = PetscFree(newproc_idx);CHKERRQ(ierr);
      }
    } else { /* simple aggregation of parts -- 'is_eq_newproc' */

      PetscInt rfactor,targetPE;
      /* find factor: the divisor of 'size' giving an active-proc count closest to new_size */
      if (new_size == 1) rfactor = size; /* easy */
      else {
        PetscReal best_fact = 0.;
        jj = -1;
        for (kk = 1 ; kk <= size ; kk++) {
          if (size%kk==0) { /* a candidate */
            PetscReal nactpe = (PetscReal)size/(PetscReal)kk, fact = nactpe/(PetscReal)new_size;
            if (fact > 1.0) fact = 1./fact; /* keep fact < 1 */
            if (fact > best_fact) {
              best_fact = fact; jj = kk;
            }
          }
        }
        if (jj != -1) rfactor = jj;
        else rfactor = 1; /* does this happen .. a prime */
      }
      new_size = size/rfactor;

      if (new_size==nactive) {
        *a_Amat_crs = Cmat; /* output - no repartitioning or reduction, bail out because nested here */
        ierr = PetscFree(counts);CHKERRQ(ierr);
        if (pc_gamg->verbose>0) {
          PetscPrintf(comm,"\t[%d]%s aggregate processors noop: new_size=%d, neq(loc)=%d\n",rank,__FUNCT__,new_size,ncrs_eq);
        }
        PetscFunctionReturn(0);
      }

      if (pc_gamg->verbose) PetscPrintf(comm,"\t[%d]%s number of equations (loc) %d with simple aggregation\n",rank,__FUNCT__,ncrs_eq);
      /* send all of this rank's equations to rank/rfactor (groups of rfactor ranks merge) */
      targetPE = rank/rfactor;
      ierr = ISCreateStride(comm, ncrs_eq, targetPE, 0, &is_eq_newproc);CHKERRQ(ierr);
    } /* end simple 'is_eq_newproc' */

    /*
      Create an index set from the is_eq_newproc index set to indicate the mapping TO
    */
    ierr = ISPartitioningToNumbering(is_eq_newproc, &is_eq_num);CHKERRQ(ierr);
    is_eq_num_prim = is_eq_num;
    /*
      Determine how many equations/vertices are assigned to each processor
    */
    ierr = ISPartitioningCount(is_eq_newproc, size, counts);CHKERRQ(ierr);
    ncrs_eq_new = counts[rank];
    ierr = ISDestroy(&is_eq_newproc);CHKERRQ(ierr);
    ncrs_new = ncrs_eq_new/cr_bs; /* eqs */

    ierr = PetscFree(counts);CHKERRQ(ierr);
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET12],0,0,0,0);CHKERRQ(ierr);
#endif
    /* data movement scope -- this could be moved to subclasses so that we don't try to cram all auxiliary data into some complex abstracted thing */
    {
      Vec            src_crd, dest_crd;
      const PetscInt *idx,ndata_rows=pc_gamg->data_cell_rows,ndata_cols=pc_gamg->data_cell_cols,node_data_sz=ndata_rows*ndata_cols;
      VecScatter     vecscat;
      PetscScalar    *array;
      IS             isscat;

      /* move data (for primal equations only) */
      /* Create a vector to contain the newly ordered element information */
      ierr = VecCreate(comm, &dest_crd);CHKERRQ(ierr);
      ierr = VecSetSizes(dest_crd, node_data_sz*ncrs_new, PETSC_DECIDE);CHKERRQ(ierr);
      ierr = VecSetType(dest_crd,VECSTANDARD);CHKERRQ(ierr); /* this is needed! */
      /*
        There are 'ndata_rows*ndata_cols' data items per node, (one can think of the vectors of having
        a block size of ...). Note, ISs are expanded into equation space by 'cr_bs'.
      */
      ierr = PetscMalloc1(ncrs*node_data_sz, &tidx);CHKERRQ(ierr);
      ierr = ISGetIndices(is_eq_num_prim, &idx);CHKERRQ(ierr);
      for (ii=0,jj=0; ii<ncrs; ii++) {
        PetscInt id = idx[ii*cr_bs]/cr_bs; /* get node back from equation numbering */
        for (kk=0; kk<node_data_sz; kk++, jj++) tidx[jj] = id*node_data_sz + kk;
      }
      ierr = ISRestoreIndices(is_eq_num_prim, &idx);CHKERRQ(ierr);
      ierr = ISCreateGeneral(comm, node_data_sz*ncrs, tidx, PETSC_COPY_VALUES, &isscat);CHKERRQ(ierr);
      ierr = PetscFree(tidx);CHKERRQ(ierr);
      /*
        Create a vector to contain the original vertex information for each element
      */
      ierr = VecCreateSeq(PETSC_COMM_SELF, node_data_sz*ncrs, &src_crd);CHKERRQ(ierr);
      for (jj=0; jj<ndata_cols; jj++) {
        const PetscInt stride0=ncrs*pc_gamg->data_cell_rows;
        for (ii=0; ii<ncrs; ii++) {
          for (kk=0; kk<ndata_rows; kk++) {
            /* ix: column-major layout of pc_gamg->data; jx: node-interleaved layout in the vector */
            PetscInt    ix = ii*ndata_rows + kk + jj*stride0, jx = ii*node_data_sz + kk*ndata_cols + jj;
            PetscScalar tt = (PetscScalar)pc_gamg->data[ix];
            ierr = VecSetValues(src_crd, 1, &jx, &tt, INSERT_VALUES);CHKERRQ(ierr);
          }
        }
      }
      ierr = VecAssemblyBegin(src_crd);CHKERRQ(ierr);
      ierr = VecAssemblyEnd(src_crd);CHKERRQ(ierr);
      /*
        Scatter the element vertex information (still in the original vertex ordering)
        to the correct processor
      */
      ierr = VecScatterCreate(src_crd, NULL, dest_crd, isscat, &vecscat);CHKERRQ(ierr);
      ierr = ISDestroy(&isscat);CHKERRQ(ierr);
      ierr = VecScatterBegin(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
      ierr = VecScatterEnd(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
      ierr = VecScatterDestroy(&vecscat);CHKERRQ(ierr);
      ierr = VecDestroy(&src_crd);CHKERRQ(ierr);
      /*
        Put the element vertex data into a new allocation of the gdata->ele
      */
      ierr = PetscFree(pc_gamg->data);CHKERRQ(ierr);
      ierr = PetscMalloc1(node_data_sz*ncrs_new, &pc_gamg->data);CHKERRQ(ierr);

      pc_gamg->data_sz = node_data_sz*ncrs_new;
      strideNew        = ncrs_new*ndata_rows;

      /* unpack from the vector's node-interleaved layout back to column-major */
      ierr = VecGetArray(dest_crd, &array);CHKERRQ(ierr);
      for (jj=0; jj<ndata_cols; jj++) {
        for (ii=0; ii<ncrs_new; ii++) {
          for (kk=0; kk<ndata_rows; kk++) {
            PetscInt ix = ii*ndata_rows + kk + jj*strideNew, jx = ii*node_data_sz + kk*ndata_cols + jj;
            pc_gamg->data[ix] = PetscRealPart(array[jx]);
          }
        }
      }
      ierr = VecRestoreArray(dest_crd, &array);CHKERRQ(ierr);
      ierr = VecDestroy(&dest_crd);CHKERRQ(ierr);
    }
    /* move A and P (columns) with new layout */
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET13],0,0,0,0);CHKERRQ(ierr);
#endif

    /*
      Invert for MatGetSubMatrix
    */
    ierr = ISInvertPermutation(is_eq_num, ncrs_eq_new, &new_eq_indices);CHKERRQ(ierr);
    ierr = ISSort(new_eq_indices);CHKERRQ(ierr); /* is this needed? */
    ierr = ISSetBlockSize(new_eq_indices, cr_bs);CHKERRQ(ierr);
    if (is_eq_num != is_eq_num_prim) {
      ierr = ISDestroy(&is_eq_num_prim);CHKERRQ(ierr); /* could be same as 'is_eq_num' */
    }
    if (Pcolumnperm) {
      /* hand the column permutation back to the caller (extra reference taken) */
      ierr = PetscObjectReference((PetscObject)new_eq_indices);CHKERRQ(ierr);
      *Pcolumnperm = new_eq_indices;
    }
    ierr = ISDestroy(&is_eq_num);CHKERRQ(ierr);
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET13],0,0,0,0);CHKERRQ(ierr);
    ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET14],0,0,0,0);CHKERRQ(ierr);
#endif
    /* 'a_Amat_crs' output: symmetric permutation of Cmat onto the new layout */
    {
      Mat mat;
      ierr = MatGetSubMatrix(Cmat, new_eq_indices, new_eq_indices, MAT_INITIAL_MATRIX, &mat);CHKERRQ(ierr);
      *a_Amat_crs = mat;

      if (!PETSC_TRUE) { /* dead debug code */
        PetscInt cbs, rbs;
        ierr = MatGetBlockSizes(Cmat, &rbs, &cbs);CHKERRQ(ierr);
        ierr = PetscPrintf(MPI_COMM_SELF,"[%d]%s Old Mat rbs=%d cbs=%d\n",rank,__FUNCT__,rbs,cbs);CHKERRQ(ierr);
        ierr = MatGetBlockSizes(mat, &rbs, &cbs);CHKERRQ(ierr);
        ierr = PetscPrintf(MPI_COMM_SELF,"[%d]%s New Mat rbs=%d cbs=%d cr_bs=%d\n",rank,__FUNCT__,rbs,cbs,cr_bs);CHKERRQ(ierr);
      }
    }
    ierr = MatDestroy(&Cmat);CHKERRQ(ierr);

#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET14],0,0,0,0);CHKERRQ(ierr);
#endif
    /* prolongator: permute the columns of P to the new coarse layout (rows keep the fine layout) */
    {
      IS       findices;
      PetscInt Istart,Iend;
      Mat      Pnew;
      ierr = MatGetOwnershipRange(Pold, &Istart, &Iend);CHKERRQ(ierr);
#if defined PETSC_GAMG_USE_LOG
      ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET15],0,0,0,0);CHKERRQ(ierr);
#endif
      ierr = ISCreateStride(comm,Iend-Istart,Istart,1,&findices);CHKERRQ(ierr);
      ierr = ISSetBlockSize(findices,f_bs);CHKERRQ(ierr);
      ierr = MatGetSubMatrix(Pold, findices, new_eq_indices, MAT_INITIAL_MATRIX, &Pnew);CHKERRQ(ierr);
      ierr = ISDestroy(&findices);CHKERRQ(ierr);

      if (!PETSC_TRUE) { /* dead debug code */
        PetscInt cbs, rbs;
        ierr = MatGetBlockSizes(Pold, &rbs, &cbs);CHKERRQ(ierr);
        ierr = PetscPrintf(MPI_COMM_SELF,"[%d]%s Pold rbs=%d cbs=%d\n",rank,__FUNCT__,rbs,cbs);CHKERRQ(ierr);
        ierr = MatGetBlockSizes(Pnew, &rbs, &cbs);CHKERRQ(ierr);
        ierr = PetscPrintf(MPI_COMM_SELF,"[%d]%s Pnew rbs=%d cbs=%d\n",rank,__FUNCT__,rbs,cbs);CHKERRQ(ierr);
      }
#if defined PETSC_GAMG_USE_LOG
      ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET15],0,0,0,0);CHKERRQ(ierr);
#endif
      ierr = MatDestroy(a_P_inout);CHKERRQ(ierr);

      /* output - repartitioned */
      *a_P_inout = Pnew;
    }
    ierr = ISDestroy(&new_eq_indices);CHKERRQ(ierr);

    *a_nactive_proc = new_size; /* output */
  }

  /* output matrix data (dead debug code: MATLAB dump of fine/coarse operators) */
  if (!PETSC_TRUE) {
    PetscViewer viewer; char fname[32]; static int llev=0; Cmat = *a_Amat_crs;
    if (llev==0) {
      sprintf(fname,"Cmat_%d.m",llev++);
      PetscViewerASCIIOpen(comm,fname,&viewer);
      ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_MATLAB);CHKERRQ(ierr);
      ierr = MatView(Amat_fine, viewer);CHKERRQ(ierr);
      ierr = PetscViewerDestroy(&viewer);
    }
    sprintf(fname,"Cmat_%d.m",llev++);
    PetscViewerASCIIOpen(comm,fname,&viewer);
    ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_MATLAB);CHKERRQ(ierr);
    ierr = MatView(Cmat, viewer);CHKERRQ(ierr);
    ierr = PetscViewerDestroy(&viewer);
  }
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------- */
/*
   PCSetUp_GAMG - Prepares for the use of the GAMG preconditioner
                    by setting data structures and options.

   Input Parameter:
.  pc - the preconditioner context

   Application Interface Routine: PCSetUp()

   Notes:
   The interface routine PCSetUp() is not usually called directly by
   the user, but instead is called by PCApply() if necessary.
473 */ 474 #undef __FUNCT__ 475 #define __FUNCT__ "PCSetUp_GAMG" 476 PetscErrorCode PCSetUp_GAMG(PC pc) 477 { 478 PetscErrorCode ierr; 479 PC_MG *mg = (PC_MG*)pc->data; 480 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 481 Mat Pmat = pc->pmat; 482 PetscInt fine_level,level,level1,bs,M,qq,lidx,nASMBlocksArr[GAMG_MAXLEVELS]; 483 MPI_Comm comm; 484 PetscMPIInt rank,size,nactivepe; 485 Mat Aarr[GAMG_MAXLEVELS],Parr[GAMG_MAXLEVELS]; 486 PetscReal emaxs[GAMG_MAXLEVELS]; 487 IS *ASMLocalIDsArr[GAMG_MAXLEVELS]; 488 PetscLogDouble nnz0=0.,nnztot=0.; 489 MatInfo info; 490 PetscBool redo_mesh_setup = (PetscBool)(!pc_gamg->reuse_prol); 491 492 PetscFunctionBegin; 493 ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr); 494 ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); 495 ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 496 497 if (pc_gamg->verbose>2) PetscPrintf(comm,"[%d]%s pc_gamg->setup_count=%d pc->setupcalled=%d\n",rank,__FUNCT__,pc_gamg->setup_count,pc->setupcalled); 498 499 if (pc_gamg->setup_count++ > 0) { 500 if (redo_mesh_setup) { 501 /* reset everything */ 502 ierr = PCReset_MG(pc);CHKERRQ(ierr); 503 pc->setupcalled = 0; 504 } else { 505 PC_MG_Levels **mglevels = mg->levels; 506 /* just do Galerkin grids */ 507 Mat B,dA,dB; 508 509 if (!pc->setupcalled) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"PCSetUp() has not been called yet"); 510 if (pc_gamg->Nlevels > 1) { 511 /* currently only handle case where mat and pmat are the same on coarser levels */ 512 ierr = KSPGetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,&dA,&dB);CHKERRQ(ierr); 513 /* (re)set to get dirty flag */ 514 ierr = KSPSetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,dA,dB);CHKERRQ(ierr); 515 516 for (level=pc_gamg->Nlevels-2; level>=0; level--) { 517 /* the first time through the matrix structure has changed from repartitioning */ 518 if (pc_gamg->setup_count==2) { 519 ierr = MatPtAP(dB,mglevels[level+1]->interpolate,MAT_INITIAL_MATRIX,1.0,&B);CHKERRQ(ierr); 520 ierr = 
MatDestroy(&mglevels[level]->A);CHKERRQ(ierr); 521 522 mglevels[level]->A = B; 523 } else { 524 ierr = KSPGetOperators(mglevels[level]->smoothd,NULL,&B);CHKERRQ(ierr); 525 ierr = MatPtAP(dB,mglevels[level+1]->interpolate,MAT_REUSE_MATRIX,1.0,&B);CHKERRQ(ierr); 526 } 527 ierr = KSPSetOperators(mglevels[level]->smoothd,B,B);CHKERRQ(ierr); 528 dB = B; 529 } 530 } 531 532 ierr = PCSetUp_MG(pc);CHKERRQ(ierr); 533 534 PetscFunctionReturn(0); 535 } 536 } 537 538 if (!pc_gamg->data) { 539 if (pc_gamg->orig_data) { 540 ierr = MatGetBlockSize(Pmat, &bs);CHKERRQ(ierr); 541 ierr = MatGetLocalSize(Pmat, &qq, NULL);CHKERRQ(ierr); 542 543 pc_gamg->data_sz = (qq/bs)*pc_gamg->orig_data_cell_rows*pc_gamg->orig_data_cell_cols; 544 pc_gamg->data_cell_rows = pc_gamg->orig_data_cell_rows; 545 pc_gamg->data_cell_cols = pc_gamg->orig_data_cell_cols; 546 547 ierr = PetscMalloc1(pc_gamg->data_sz, &pc_gamg->data);CHKERRQ(ierr); 548 for (qq=0; qq<pc_gamg->data_sz; qq++) pc_gamg->data[qq] = pc_gamg->orig_data[qq]; 549 } else { 550 if (!pc_gamg->ops->createdefaultdata) SETERRQ(comm,PETSC_ERR_PLIB,"'createdefaultdata' not set(?) 
need to support NULL data"); 551 ierr = pc_gamg->ops->createdefaultdata(pc,Pmat);CHKERRQ(ierr); 552 } 553 } 554 555 /* cache original data for reuse */ 556 if (!pc_gamg->orig_data && redo_mesh_setup) { 557 ierr = PetscMalloc1(pc_gamg->data_sz, &pc_gamg->orig_data);CHKERRQ(ierr); 558 for (qq=0; qq<pc_gamg->data_sz; qq++) pc_gamg->orig_data[qq] = pc_gamg->data[qq]; 559 pc_gamg->orig_data_cell_rows = pc_gamg->data_cell_rows; 560 pc_gamg->orig_data_cell_cols = pc_gamg->data_cell_cols; 561 } 562 563 /* get basic dims */ 564 ierr = MatGetBlockSize(Pmat, &bs);CHKERRQ(ierr); 565 566 ierr = MatGetSize(Pmat, &M, &qq);CHKERRQ(ierr); 567 if (pc_gamg->verbose) { 568 PetscInt NN = M; 569 if (pc_gamg->verbose==1) { 570 ierr = MatGetInfo(Pmat,MAT_LOCAL,&info);CHKERRQ(ierr); 571 ierr = MatGetLocalSize(Pmat, &NN, &qq);CHKERRQ(ierr); 572 if (!NN) NN=1; 573 } else { 574 ierr = MatGetInfo(Pmat,MAT_GLOBAL_SUM,&info);CHKERRQ(ierr); 575 } 576 nnz0 = info.nz_used; 577 nnztot = info.nz_used; 578 ierr = PetscPrintf(comm,"\t[%d]%s level %d N=%d, n data rows=%d, n data cols=%d, nnz/row (ave)=%d, np=%d\n", 579 rank,__FUNCT__,0,M,pc_gamg->data_cell_rows,pc_gamg->data_cell_cols, 580 (int)(nnz0/(PetscReal)NN),size);CHKERRQ(ierr); 581 } 582 583 /* Get A_i and R_i */ 584 for (level=0, Aarr[0]=Pmat, nactivepe = size; /* hard wired stopping logic */ 585 level < (pc_gamg->Nlevels-1) && (level==0 || M>pc_gamg->coarse_eq_limit); /* && (size==1 || nactivepe>1); */ 586 level++) { 587 pc_gamg->firstCoarsen = (level ? 
PETSC_FALSE : PETSC_TRUE); 588 level1 = level + 1; 589 #if defined PETSC_GAMG_USE_LOG 590 ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET1],0,0,0,0);CHKERRQ(ierr); 591 #if (defined GAMG_STAGES) 592 ierr = PetscLogStagePush(gamg_stages[level]);CHKERRQ(ierr); 593 #endif 594 #endif 595 { /* construct prolongator */ 596 Mat Gmat; 597 PetscCoarsenData *agg_lists; 598 Mat Prol11; 599 600 ierr = pc_gamg->ops->graph(pc,Aarr[level], &Gmat);CHKERRQ(ierr); 601 ierr = pc_gamg->ops->coarsen(pc, &Gmat, &agg_lists);CHKERRQ(ierr); 602 ierr = pc_gamg->ops->prolongator(pc,Aarr[level],Gmat,agg_lists,&Prol11);CHKERRQ(ierr); 603 604 /* could have failed to create new level */ 605 if (Prol11) { 606 /* get new block size of coarse matrices */ 607 ierr = MatGetBlockSizes(Prol11, NULL, &bs);CHKERRQ(ierr); 608 609 if (pc_gamg->ops->optprol) { 610 /* smooth */ 611 ierr = pc_gamg->ops->optprol(pc, Aarr[level], &Prol11);CHKERRQ(ierr); 612 } 613 614 Parr[level1] = Prol11; 615 } else Parr[level1] = NULL; 616 617 if (pc_gamg->use_aggs_in_gasm) { 618 PetscInt bs; 619 ierr = MatGetBlockSizes(Prol11, &bs, NULL);CHKERRQ(ierr); 620 ierr = PetscCDGetASMBlocks(agg_lists, bs, &nASMBlocksArr[level], &ASMLocalIDsArr[level]);CHKERRQ(ierr); 621 } 622 623 ierr = MatDestroy(&Gmat);CHKERRQ(ierr); 624 ierr = PetscCDDestroy(agg_lists);CHKERRQ(ierr); 625 } /* construct prolongator scope */ 626 #if defined PETSC_GAMG_USE_LOG 627 ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET1],0,0,0,0);CHKERRQ(ierr); 628 #endif 629 /* cache eigen estimate */ 630 if (pc_gamg->emax_id != -1) { 631 PetscBool flag; 632 ierr = PetscObjectComposedDataGetReal((PetscObject)Aarr[level], pc_gamg->emax_id, emaxs[level], flag);CHKERRQ(ierr); 633 if (!flag) emaxs[level] = -1.; 634 } else emaxs[level] = -1.; 635 if (level==0) Aarr[0] = Pmat; /* use Pmat for finest level setup */ 636 if (!Parr[level1]) { 637 if (pc_gamg->verbose) { 638 ierr = PetscPrintf(comm,"\t[%d]%s stop gridding, level %d\n",rank,__FUNCT__,level);CHKERRQ(ierr); 639 
} 640 break; 641 } 642 #if defined PETSC_GAMG_USE_LOG 643 ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET2],0,0,0,0);CHKERRQ(ierr); 644 #endif 645 646 ierr = pc_gamg->ops->createlevel(pc, Aarr[level], bs, 647 &Parr[level1], &Aarr[level1], &nactivepe, NULL);CHKERRQ(ierr); 648 649 #if defined PETSC_GAMG_USE_LOG 650 ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET2],0,0,0,0);CHKERRQ(ierr); 651 #endif 652 ierr = MatGetSize(Aarr[level1], &M, &qq);CHKERRQ(ierr); 653 654 if (pc_gamg->verbose > 0) { 655 PetscInt NN = M; 656 if (pc_gamg->verbose==1) { 657 ierr = MatGetInfo(Aarr[level1],MAT_LOCAL,&info);CHKERRQ(ierr); 658 ierr = MatGetLocalSize(Aarr[level1], &NN, &qq);CHKERRQ(ierr); 659 if (!NN) NN=1; 660 } else { 661 ierr = MatGetInfo(Aarr[level1], MAT_GLOBAL_SUM, &info);CHKERRQ(ierr); 662 } 663 664 nnztot += info.nz_used; 665 ierr = PetscPrintf(comm,"\t\t[%d]%s %d) N=%d, n data cols=%d, nnz/row (ave)=%d, %d active pes\n", 666 rank,__FUNCT__,(int)level1,M,pc_gamg->data_cell_cols, 667 (int)(info.nz_used/(PetscReal)NN), nactivepe);CHKERRQ(ierr); 668 } 669 670 /* stop if one node or one proc -- could pull back for singular problems */ 671 if ( (pc_gamg->data_cell_cols && M/pc_gamg->data_cell_cols < 2) || (!pc_gamg->data_cell_cols && M < 2) 672 || nactivepe==1 ) { 673 level++; 674 break; 675 } 676 #if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES) 677 ierr = PetscLogStagePop();CHKERRQ(ierr); 678 #endif 679 } /* levels */ 680 pc_gamg->firstCoarsen = PETSC_FALSE; 681 682 if (pc_gamg->data) { 683 ierr = PetscFree(pc_gamg->data);CHKERRQ(ierr); 684 pc_gamg->data = NULL; 685 } 686 687 if (pc_gamg->verbose) PetscPrintf(comm,"\t[%d]%s %d levels, grid complexity = %g\n",0,__FUNCT__,level+1,nnztot/nnz0); 688 pc_gamg->Nlevels = level + 1; 689 fine_level = level; 690 ierr = PCMGSetLevels(pc,pc_gamg->Nlevels,NULL);CHKERRQ(ierr); 691 692 /* simple setup */ 693 if (!PETSC_TRUE) { 694 PC_MG_Levels **mglevels = mg->levels; 695 for (lidx=0,level=pc_gamg->Nlevels-1; 696 
lidx<fine_level; 697 lidx++, level--) { 698 ierr = PCMGSetInterpolation(pc, lidx+1, Parr[level]);CHKERRQ(ierr); 699 ierr = KSPSetOperators(mglevels[lidx]->smoothd, Aarr[level], Aarr[level]);CHKERRQ(ierr); 700 ierr = MatDestroy(&Parr[level]);CHKERRQ(ierr); 701 ierr = MatDestroy(&Aarr[level]);CHKERRQ(ierr); 702 } 703 ierr = KSPSetOperators(mglevels[fine_level]->smoothd, Aarr[0], Aarr[0]);CHKERRQ(ierr); 704 705 ierr = PCSetUp_MG(pc);CHKERRQ(ierr); 706 } else if (pc_gamg->Nlevels > 1) { /* don't setup MG if one level */ 707 /* set default smoothers & set operators */ 708 for (lidx = 1, level = pc_gamg->Nlevels-2; 709 lidx <= fine_level; 710 lidx++, level--) { 711 KSP smoother; 712 PC subpc; 713 714 ierr = PCMGGetSmoother(pc, lidx, &smoother);CHKERRQ(ierr); 715 ierr = KSPGetPC(smoother, &subpc);CHKERRQ(ierr); 716 717 ierr = KSPSetNormType(smoother, KSP_NORM_NONE);CHKERRQ(ierr); 718 /* set ops */ 719 ierr = KSPSetOperators(smoother, Aarr[level], Aarr[level]);CHKERRQ(ierr); 720 ierr = PCMGSetInterpolation(pc, lidx, Parr[level+1]);CHKERRQ(ierr); 721 722 /* set defaults */ 723 ierr = KSPSetType(smoother, KSPCHEBYSHEV);CHKERRQ(ierr); 724 725 /* set blocks for GASM smoother that uses the 'aggregates' */ 726 if (pc_gamg->use_aggs_in_gasm) { 727 PetscInt sz; 728 IS *is; 729 730 sz = nASMBlocksArr[level]; 731 is = ASMLocalIDsArr[level]; 732 ierr = PCSetType(subpc, PCGASM);CHKERRQ(ierr); 733 ierr = PCGASMSetOverlap(subpc, 0);CHKERRQ(ierr); 734 if (sz==0) { 735 IS is; 736 PetscInt my0,kk; 737 ierr = MatGetOwnershipRange(Aarr[level], &my0, &kk);CHKERRQ(ierr); 738 ierr = ISCreateGeneral(PETSC_COMM_SELF, 1, &my0, PETSC_COPY_VALUES, &is);CHKERRQ(ierr); 739 ierr = PCGASMSetSubdomains(subpc, 1, &is, NULL);CHKERRQ(ierr); 740 ierr = ISDestroy(&is);CHKERRQ(ierr); 741 } else { 742 PetscInt kk; 743 ierr = PCGASMSetSubdomains(subpc, sz, is, NULL);CHKERRQ(ierr); 744 for (kk=0; kk<sz; kk++) { 745 ierr = ISDestroy(&is[kk]);CHKERRQ(ierr); 746 } 747 ierr = PetscFree(is);CHKERRQ(ierr); 748 } 749 
ASMLocalIDsArr[level] = NULL; 750 nASMBlocksArr[level] = 0; 751 ierr = PCGASMSetType(subpc, PC_GASM_BASIC);CHKERRQ(ierr); 752 } else { 753 ierr = PCSetType(subpc, PCSOR);CHKERRQ(ierr); 754 } 755 } 756 { 757 /* coarse grid */ 758 KSP smoother,*k2; PC subpc,pc2; PetscInt ii,first; 759 Mat Lmat = Aarr[(level=pc_gamg->Nlevels-1)]; lidx = 0; 760 ierr = PCMGGetSmoother(pc, lidx, &smoother);CHKERRQ(ierr); 761 ierr = KSPSetOperators(smoother, Lmat, Lmat);CHKERRQ(ierr); 762 ierr = KSPSetNormType(smoother, KSP_NORM_NONE);CHKERRQ(ierr); 763 ierr = KSPGetPC(smoother, &subpc);CHKERRQ(ierr); 764 ierr = PCSetType(subpc, PCBJACOBI);CHKERRQ(ierr); 765 ierr = PCSetUp(subpc);CHKERRQ(ierr); 766 ierr = PCBJacobiGetSubKSP(subpc,&ii,&first,&k2);CHKERRQ(ierr); 767 if (ii != 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"ii %D is not one",ii); 768 ierr = KSPGetPC(k2[0],&pc2);CHKERRQ(ierr); 769 ierr = PCSetType(pc2, PCLU);CHKERRQ(ierr); 770 ierr = PCFactorSetShiftType(pc2,MAT_SHIFT_INBLOCKS);CHKERRQ(ierr); 771 ierr = KSPSetTolerances(k2[0],PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,1);CHKERRQ(ierr); 772 /* This flag gets reset by PCBJacobiGetSubKSP(), but our BJacobi really does the same algorithm everywhere (and in 773 * fact, all but one process will have zero dofs), so we reset the flag to avoid having PCView_BJacobi attempt to 774 * view every subdomain as though they were different. 
*/ 775 ((PC_BJacobi*)subpc->data)->same_local_solves = PETSC_TRUE; 776 } 777 778 /* should be called in PCSetFromOptions_GAMG(), but cannot be called prior to PCMGSetLevels() */ 779 ierr = PetscObjectOptionsBegin((PetscObject)pc);CHKERRQ(ierr); 780 ierr = PCSetFromOptions_MG(PetscOptionsObject,pc);CHKERRQ(ierr); 781 ierr = PetscOptionsEnd();CHKERRQ(ierr); 782 if (mg->galerkin != 2) SETERRQ(comm,PETSC_ERR_USER,"GAMG does Galerkin manually so the -pc_mg_galerkin option must not be used."); 783 784 /* create cheby smoothers */ 785 for (lidx = 1, level = pc_gamg->Nlevels-2; 786 lidx <= fine_level; 787 lidx++, level--) { 788 KSP smoother; 789 PetscBool flag,flag2; 790 PC subpc; 791 792 ierr = PCMGGetSmoother(pc, lidx, &smoother);CHKERRQ(ierr); 793 ierr = KSPGetPC(smoother, &subpc);CHKERRQ(ierr); 794 795 /* do my own cheby */ 796 ierr = PetscObjectTypeCompare((PetscObject)smoother, KSPCHEBYSHEV, &flag);CHKERRQ(ierr); 797 if (flag) { 798 PetscReal emax, emin; 799 ierr = PetscObjectTypeCompare((PetscObject)subpc, PCJACOBI, &flag);CHKERRQ(ierr); 800 ierr = PetscObjectTypeCompare((PetscObject)subpc, PCSOR, &flag2);CHKERRQ(ierr); 801 if ((flag||flag2) && emaxs[level] > 0.0) emax=emaxs[level]; /* eigen estimate only for diagnal PC but lets acccept SOR because it is close and safe (always lower) */ 802 else { /* eigen estimate 'emax' -- this is done in cheby */ 803 KSP eksp; 804 Mat Lmat = Aarr[level]; 805 Vec bb, xx; 806 807 ierr = MatCreateVecs(Lmat, &bb, 0);CHKERRQ(ierr); 808 ierr = MatCreateVecs(Lmat, &xx, 0);CHKERRQ(ierr); 809 { 810 PetscRandom rctx; 811 ierr = PetscRandomCreate(comm,&rctx);CHKERRQ(ierr); 812 ierr = PetscRandomSetFromOptions(rctx);CHKERRQ(ierr); 813 ierr = VecSetRandom(bb,rctx);CHKERRQ(ierr); 814 ierr = PetscRandomDestroy(&rctx);CHKERRQ(ierr); 815 } 816 817 /* zeroing out BC rows -- needed for crazy matrices */ 818 { 819 PetscInt Istart,Iend,ncols,jj,Ii; 820 PetscScalar zero = 0.0; 821 ierr = MatGetOwnershipRange(Lmat, &Istart, &Iend);CHKERRQ(ierr); 822 
for (Ii = Istart, jj = 0; Ii < Iend; Ii++, jj++) { 823 ierr = MatGetRow(Lmat,Ii,&ncols,0,0);CHKERRQ(ierr); 824 if (ncols <= 1) { 825 ierr = VecSetValues(bb, 1, &Ii, &zero, INSERT_VALUES);CHKERRQ(ierr); 826 } 827 ierr = MatRestoreRow(Lmat,Ii,&ncols,0,0);CHKERRQ(ierr); 828 } 829 ierr = VecAssemblyBegin(bb);CHKERRQ(ierr); 830 ierr = VecAssemblyEnd(bb);CHKERRQ(ierr); 831 } 832 833 ierr = KSPCreate(comm, &eksp);CHKERRQ(ierr); 834 ierr = KSPSetTolerances(eksp, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT, 10);CHKERRQ(ierr); 835 ierr = KSPSetNormType(eksp, KSP_NORM_NONE);CHKERRQ(ierr); 836 ierr = KSPSetOptionsPrefix(eksp,((PetscObject)pc)->prefix);CHKERRQ(ierr); 837 ierr = KSPAppendOptionsPrefix(eksp, "gamg_est_");CHKERRQ(ierr); 838 ierr = KSPSetFromOptions(eksp);CHKERRQ(ierr); 839 840 ierr = KSPSetInitialGuessNonzero(eksp, PETSC_FALSE);CHKERRQ(ierr); 841 ierr = KSPSetOperators(eksp, Lmat, Lmat);CHKERRQ(ierr); 842 ierr = KSPSetComputeSingularValues(eksp,PETSC_TRUE);CHKERRQ(ierr); 843 844 /* set PC type to be same as smoother */ 845 ierr = KSPSetPC(eksp, subpc);CHKERRQ(ierr); 846 847 /* solve - keep stuff out of logging */ 848 ierr = PetscLogEventDeactivate(KSP_Solve);CHKERRQ(ierr); 849 ierr = PetscLogEventDeactivate(PC_Apply);CHKERRQ(ierr); 850 ierr = KSPSolve(eksp, bb, xx);CHKERRQ(ierr); 851 ierr = PetscLogEventActivate(KSP_Solve);CHKERRQ(ierr); 852 ierr = PetscLogEventActivate(PC_Apply);CHKERRQ(ierr); 853 854 ierr = KSPComputeExtremeSingularValues(eksp, &emax, &emin);CHKERRQ(ierr); 855 856 ierr = VecDestroy(&xx);CHKERRQ(ierr); 857 ierr = VecDestroy(&bb);CHKERRQ(ierr); 858 ierr = KSPDestroy(&eksp);CHKERRQ(ierr); 859 860 if (pc_gamg->verbose > 0) { 861 PetscInt N1, tt; 862 ierr = MatGetSize(Aarr[level], &N1, &tt);CHKERRQ(ierr); 863 PetscPrintf(comm,"\t\t\t%s PC setup max eigen=%e min=%e on level %d (N=%d)\n",__FUNCT__,emax,emin,lidx,N1); 864 } 865 } 866 { 867 PetscInt N1, N0; 868 ierr = MatGetSize(Aarr[level], &N1, NULL);CHKERRQ(ierr); 869 ierr = MatGetSize(Aarr[level+1], 
&N0, NULL);CHKERRQ(ierr); 870 /* heuristic - is this crap? */ 871 /* emin = 1.*emax/((PetscReal)N1/(PetscReal)N0); */ 872 emin = emax * pc_gamg->eigtarget[0]; 873 emax *= pc_gamg->eigtarget[1]; 874 } 875 ierr = KSPChebyshevSetEigenvalues(smoother, emax, emin);CHKERRQ(ierr); 876 } /* setup checby flag */ 877 } /* non-coarse levels */ 878 879 /* clean up */ 880 for (level=1; level<pc_gamg->Nlevels; level++) { 881 ierr = MatDestroy(&Parr[level]);CHKERRQ(ierr); 882 ierr = MatDestroy(&Aarr[level]);CHKERRQ(ierr); 883 } 884 885 ierr = PCSetUp_MG(pc);CHKERRQ(ierr); 886 } else { 887 KSP smoother; 888 if (pc_gamg->verbose) PetscPrintf(comm,"\t[%d]%s one level solver used (system is seen as DD). Using default solver.\n",rank,__FUNCT__); 889 ierr = PCMGGetSmoother(pc, 0, &smoother);CHKERRQ(ierr); 890 ierr = KSPSetOperators(smoother, Aarr[0], Aarr[0]);CHKERRQ(ierr); 891 ierr = KSPSetType(smoother, KSPPREONLY);CHKERRQ(ierr); 892 ierr = PCSetUp_MG(pc);CHKERRQ(ierr); 893 } 894 PetscFunctionReturn(0); 895 } 896 897 /* ------------------------------------------------------------------------- */ 898 /* 899 PCDestroy_GAMG - Destroys the private context for the GAMG preconditioner 900 that was created with PCCreate_GAMG(). 901 902 Input Parameter: 903 . 
pc - the preconditioner context 904 905 Application Interface Routine: PCDestroy() 906 */ 907 #undef __FUNCT__ 908 #define __FUNCT__ "PCDestroy_GAMG" 909 PetscErrorCode PCDestroy_GAMG(PC pc) 910 { 911 PetscErrorCode ierr; 912 PC_MG *mg = (PC_MG*)pc->data; 913 PC_GAMG *pc_gamg= (PC_GAMG*)mg->innerctx; 914 915 PetscFunctionBegin; 916 ierr = PCReset_GAMG(pc);CHKERRQ(ierr); 917 if (pc_gamg->ops->destroy) { 918 ierr = (*pc_gamg->ops->destroy)(pc);CHKERRQ(ierr); 919 } 920 ierr = PetscFree(pc_gamg->ops);CHKERRQ(ierr); 921 ierr = PetscFree(pc_gamg->gamg_type_name);CHKERRQ(ierr); 922 ierr = PetscFree(pc_gamg);CHKERRQ(ierr); 923 ierr = PCDestroy_MG(pc);CHKERRQ(ierr); 924 PetscFunctionReturn(0); 925 } 926 927 928 #undef __FUNCT__ 929 #define __FUNCT__ "PCGAMGSetProcEqLim" 930 /*@ 931 PCGAMGSetProcEqLim - Set number of equations to aim for on coarse grids via processor reduction. 932 933 Logically Collective on PC 934 935 Input Parameters: 936 + pc - the preconditioner context 937 - n - the number of equations 938 939 940 Options Database Key: 941 . -pc_gamg_process_eq_limit <limit> 942 943 Level: intermediate 944 945 Concepts: Unstructured multrigrid preconditioner 946 947 .seealso: () 948 @*/ 949 PetscErrorCode PCGAMGSetProcEqLim(PC pc, PetscInt n) 950 { 951 PetscErrorCode ierr; 952 953 PetscFunctionBegin; 954 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 955 ierr = PetscTryMethod(pc,"PCGAMGSetProcEqLim_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr); 956 PetscFunctionReturn(0); 957 } 958 959 #undef __FUNCT__ 960 #define __FUNCT__ "PCGAMGSetProcEqLim_GAMG" 961 static PetscErrorCode PCGAMGSetProcEqLim_GAMG(PC pc, PetscInt n) 962 { 963 PC_MG *mg = (PC_MG*)pc->data; 964 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 965 966 PetscFunctionBegin; 967 if (n>0) pc_gamg->min_eq_proc = n; 968 PetscFunctionReturn(0); 969 } 970 971 #undef __FUNCT__ 972 #define __FUNCT__ "PCGAMGSetCoarseEqLim" 973 /*@ 974 PCGAMGSetCoarseEqLim - Set max number of equations on coarse grids. 
975 976 Collective on PC 977 978 Input Parameters: 979 + pc - the preconditioner context 980 - n - maximum number of equations to aim for 981 982 Options Database Key: 983 . -pc_gamg_coarse_eq_limit <limit> 984 985 Level: intermediate 986 987 Concepts: Unstructured multrigrid preconditioner 988 989 @*/ 990 PetscErrorCode PCGAMGSetCoarseEqLim(PC pc, PetscInt n) 991 { 992 PetscErrorCode ierr; 993 994 PetscFunctionBegin; 995 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 996 ierr = PetscTryMethod(pc,"PCGAMGSetCoarseEqLim_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr); 997 PetscFunctionReturn(0); 998 } 999 1000 #undef __FUNCT__ 1001 #define __FUNCT__ "PCGAMGSetCoarseEqLim_GAMG" 1002 static PetscErrorCode PCGAMGSetCoarseEqLim_GAMG(PC pc, PetscInt n) 1003 { 1004 PC_MG *mg = (PC_MG*)pc->data; 1005 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 1006 1007 PetscFunctionBegin; 1008 if (n>0) pc_gamg->coarse_eq_limit = n; 1009 PetscFunctionReturn(0); 1010 } 1011 1012 #undef __FUNCT__ 1013 #define __FUNCT__ "PCGAMGSetRepartitioning" 1014 /*@ 1015 PCGAMGSetRepartitioning - Repartition the coarse grids 1016 1017 Collective on PC 1018 1019 Input Parameters: 1020 + pc - the preconditioner context 1021 - n - PETSC_TRUE or PETSC_FALSE 1022 1023 Options Database Key: 1024 . 
-pc_gamg_repartition <true,false> 1025 1026 Level: intermediate 1027 1028 Concepts: Unstructured multrigrid preconditioner 1029 1030 .seealso: () 1031 @*/ 1032 PetscErrorCode PCGAMGSetRepartitioning(PC pc, PetscBool n) 1033 { 1034 PetscErrorCode ierr; 1035 1036 PetscFunctionBegin; 1037 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1038 ierr = PetscTryMethod(pc,"PCGAMGSetRepartitioning_C",(PC,PetscBool),(pc,n));CHKERRQ(ierr); 1039 PetscFunctionReturn(0); 1040 } 1041 1042 #undef __FUNCT__ 1043 #define __FUNCT__ "PCGAMGSetRepartitioning_GAMG" 1044 static PetscErrorCode PCGAMGSetRepartitioning_GAMG(PC pc, PetscBool n) 1045 { 1046 PC_MG *mg = (PC_MG*)pc->data; 1047 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 1048 1049 PetscFunctionBegin; 1050 pc_gamg->repart = n; 1051 PetscFunctionReturn(0); 1052 } 1053 1054 #undef __FUNCT__ 1055 #define __FUNCT__ "PCGAMGSetReuseInterpolation" 1056 /*@ 1057 PCGAMGSetReuseInterpolation - Reuse prolongation when rebuilding preconditioner 1058 1059 Collective on PC 1060 1061 Input Parameters: 1062 + pc - the preconditioner context 1063 - n - PETSC_TRUE or PETSC_FALSE 1064 1065 Options Database Key: 1066 . 
-pc_gamg_reuse_interpolation <true,false> 1067 1068 Level: intermediate 1069 1070 Concepts: Unstructured multrigrid preconditioner 1071 1072 .seealso: () 1073 @*/ 1074 PetscErrorCode PCGAMGSetReuseInterpolation(PC pc, PetscBool n) 1075 { 1076 PetscErrorCode ierr; 1077 1078 PetscFunctionBegin; 1079 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1080 ierr = PetscTryMethod(pc,"PCGAMGSetReuseInterpolation_C",(PC,PetscBool),(pc,n));CHKERRQ(ierr); 1081 PetscFunctionReturn(0); 1082 } 1083 1084 #undef __FUNCT__ 1085 #define __FUNCT__ "PCGAMGSetReuseInterpolation_GAMG" 1086 static PetscErrorCode PCGAMGSetReuseInterpolation_GAMG(PC pc, PetscBool n) 1087 { 1088 PC_MG *mg = (PC_MG*)pc->data; 1089 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 1090 1091 PetscFunctionBegin; 1092 pc_gamg->reuse_prol = n; 1093 PetscFunctionReturn(0); 1094 } 1095 1096 #undef __FUNCT__ 1097 #define __FUNCT__ "PCGAMGSetUseASMAggs" 1098 /*@ 1099 PCGAMGSetUseASMAggs - 1100 1101 Collective on PC 1102 1103 Input Parameters: 1104 . pc - the preconditioner context 1105 1106 1107 Options Database Key: 1108 . 
-pc_gamg_use_agg_gasm 1109 1110 Level: intermediate 1111 1112 Concepts: Unstructured multrigrid preconditioner 1113 1114 .seealso: () 1115 @*/ 1116 PetscErrorCode PCGAMGSetUseASMAggs(PC pc, PetscBool n) 1117 { 1118 PetscErrorCode ierr; 1119 1120 PetscFunctionBegin; 1121 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1122 ierr = PetscTryMethod(pc,"PCGAMGSetUseASMAggs_C",(PC,PetscBool),(pc,n));CHKERRQ(ierr); 1123 PetscFunctionReturn(0); 1124 } 1125 1126 #undef __FUNCT__ 1127 #define __FUNCT__ "PCGAMGSetUseASMAggs_GAMG" 1128 static PetscErrorCode PCGAMGSetUseASMAggs_GAMG(PC pc, PetscBool n) 1129 { 1130 PC_MG *mg = (PC_MG*)pc->data; 1131 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 1132 1133 PetscFunctionBegin; 1134 pc_gamg->use_aggs_in_gasm = n; 1135 PetscFunctionReturn(0); 1136 } 1137 1138 #undef __FUNCT__ 1139 #define __FUNCT__ "PCGAMGSetNlevels" 1140 /*@ 1141 PCGAMGSetNlevels - Sets the maximum number of levels PCGAMG will use 1142 1143 Not collective on PC 1144 1145 Input Parameters: 1146 + pc - the preconditioner 1147 - n - the maximum number of levels to use 1148 1149 Options Database Key: 1150 . 
-pc_mg_levels 1151 1152 Level: intermediate 1153 1154 Concepts: Unstructured multrigrid preconditioner 1155 1156 .seealso: () 1157 @*/ 1158 PetscErrorCode PCGAMGSetNlevels(PC pc, PetscInt n) 1159 { 1160 PetscErrorCode ierr; 1161 1162 PetscFunctionBegin; 1163 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1164 ierr = PetscTryMethod(pc,"PCGAMGSetNlevels_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr); 1165 PetscFunctionReturn(0); 1166 } 1167 1168 #undef __FUNCT__ 1169 #define __FUNCT__ "PCGAMGSetNlevels_GAMG" 1170 static PetscErrorCode PCGAMGSetNlevels_GAMG(PC pc, PetscInt n) 1171 { 1172 PC_MG *mg = (PC_MG*)pc->data; 1173 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 1174 1175 PetscFunctionBegin; 1176 pc_gamg->Nlevels = n; 1177 PetscFunctionReturn(0); 1178 } 1179 1180 #undef __FUNCT__ 1181 #define __FUNCT__ "PCGAMGSetThreshold" 1182 /*@ 1183 PCGAMGSetThreshold - Relative threshold to use for dropping edges in aggregation graph 1184 1185 Not collective on PC 1186 1187 Input Parameters: 1188 + pc - the preconditioner context 1189 - threshold - the threshold value, 0.0 is the lowest possible 1190 1191 Options Database Key: 1192 . 
-pc_gamg_threshold <threshold> 1193 1194 Level: intermediate 1195 1196 Concepts: Unstructured multrigrid preconditioner 1197 1198 .seealso: () 1199 @*/ 1200 PetscErrorCode PCGAMGSetThreshold(PC pc, PetscReal n) 1201 { 1202 PetscErrorCode ierr; 1203 1204 PetscFunctionBegin; 1205 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1206 ierr = PetscTryMethod(pc,"PCGAMGSetThreshold_C",(PC,PetscReal),(pc,n));CHKERRQ(ierr); 1207 PetscFunctionReturn(0); 1208 } 1209 1210 #undef __FUNCT__ 1211 #define __FUNCT__ "PCGAMGSetThreshold_GAMG" 1212 static PetscErrorCode PCGAMGSetThreshold_GAMG(PC pc, PetscReal n) 1213 { 1214 PC_MG *mg = (PC_MG*)pc->data; 1215 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 1216 1217 PetscFunctionBegin; 1218 pc_gamg->threshold = n; 1219 PetscFunctionReturn(0); 1220 } 1221 1222 #undef __FUNCT__ 1223 #define __FUNCT__ "PCGAMGSetType" 1224 /*@ 1225 PCGAMGSetType - Set solution method 1226 1227 Collective on PC 1228 1229 Input Parameters: 1230 + pc - the preconditioner context 1231 - type - PCGAMGAGG, PCGAMGGEO, or PCGAMGCLASSICAL 1232 1233 Options Database Key: 1234 . -pc_gamg_type <agg,geo,classical> 1235 1236 Level: intermediate 1237 1238 Concepts: Unstructured multrigrid preconditioner 1239 1240 .seealso: PCGAMGGetType(), PCGAMG 1241 @*/ 1242 PetscErrorCode PCGAMGSetType(PC pc, PCGAMGType type) 1243 { 1244 PetscErrorCode ierr; 1245 1246 PetscFunctionBegin; 1247 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1248 ierr = PetscTryMethod(pc,"PCGAMGSetType_C",(PC,PCGAMGType),(pc,type));CHKERRQ(ierr); 1249 PetscFunctionReturn(0); 1250 } 1251 1252 #undef __FUNCT__ 1253 #define __FUNCT__ "PCGAMGGetType" 1254 /*@ 1255 PCGAMGGetType - Get solution method 1256 1257 Collective on PC 1258 1259 Input Parameter: 1260 . pc - the preconditioner context 1261 1262 Output Parameter: 1263 . 
type - the type of algorithm used 1264 1265 Level: intermediate 1266 1267 Concepts: Unstructured multrigrid preconditioner 1268 1269 .seealso: PCGAMGSetType() 1270 @*/ 1271 PetscErrorCode PCGAMGGetType(PC pc, PCGAMGType *type) 1272 { 1273 PetscErrorCode ierr; 1274 1275 PetscFunctionBegin; 1276 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1277 ierr = PetscUseMethod(pc,"PCGAMGGetType_C",(PC,PCGAMGType*),(pc,type));CHKERRQ(ierr); 1278 PetscFunctionReturn(0); 1279 } 1280 1281 #undef __FUNCT__ 1282 #define __FUNCT__ "PCGAMGGetType_GAMG" 1283 static PetscErrorCode PCGAMGGetType_GAMG(PC pc, PCGAMGType *type) 1284 { 1285 PC_MG *mg = (PC_MG*)pc->data; 1286 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 1287 1288 PetscFunctionBegin; 1289 *type = pc_gamg->type; 1290 PetscFunctionReturn(0); 1291 } 1292 1293 #undef __FUNCT__ 1294 #define __FUNCT__ "PCGAMGSetType_GAMG" 1295 static PetscErrorCode PCGAMGSetType_GAMG(PC pc, PCGAMGType type) 1296 { 1297 PetscErrorCode ierr,(*r)(PC); 1298 PC_MG *mg = (PC_MG*)pc->data; 1299 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 1300 1301 PetscFunctionBegin; 1302 pc_gamg->type = type; 1303 ierr = PetscFunctionListFind(GAMGList,type,&r);CHKERRQ(ierr); 1304 if (!r) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_UNKNOWN_TYPE,"Unknown GAMG type %s given",type); 1305 if (pc_gamg->ops->destroy) { 1306 /* there was something here - kill it */ 1307 ierr = (*pc_gamg->ops->destroy)(pc);CHKERRQ(ierr); 1308 ierr = PetscMemzero(pc_gamg->ops,sizeof(struct _PCGAMGOps));CHKERRQ(ierr); 1309 pc_gamg->ops->createlevel = PCGAMGCreateLevel_GAMG; 1310 /* cleaning up common data in pc_gamg - this should disapear someday */ 1311 pc_gamg->data_cell_cols = 0; 1312 pc_gamg->data_cell_rows = 0; 1313 pc_gamg->orig_data_cell_cols = 0; 1314 pc_gamg->orig_data_cell_rows = 0; 1315 if (pc_gamg->data_sz) { 1316 ierr = PetscFree(pc_gamg->data);CHKERRQ(ierr); 1317 pc_gamg->data_sz = 0; 1318 } else if (pc_gamg->data) { 1319 ierr = PetscFree(pc_gamg->data);CHKERRQ(ierr); /* can this happen ? 
*/ 1320 } 1321 } 1322 ierr = PetscFree(pc_gamg->gamg_type_name);CHKERRQ(ierr); 1323 ierr = PetscStrallocpy(type,&pc_gamg->gamg_type_name);CHKERRQ(ierr); 1324 ierr = (*r)(pc);CHKERRQ(ierr); 1325 PetscFunctionReturn(0); 1326 } 1327 1328 #undef __FUNCT__ 1329 #define __FUNCT__ "PCSetFromOptions_GAMG" 1330 PetscErrorCode PCSetFromOptions_GAMG(PetscOptions *PetscOptionsObject,PC pc) 1331 { 1332 PetscErrorCode ierr; 1333 PC_MG *mg = (PC_MG*)pc->data; 1334 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 1335 PetscBool flag; 1336 PetscInt two = 2; 1337 MPI_Comm comm; 1338 1339 PetscFunctionBegin; 1340 ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr); 1341 ierr = PetscOptionsHead(PetscOptionsObject,"GAMG options");CHKERRQ(ierr); 1342 { 1343 /* -pc_gamg_type */ 1344 { 1345 char tname[256]; 1346 ierr = PetscOptionsFList("-pc_gamg_type","Type of AMG method","PCGAMGSetType",GAMGList, pc_gamg->gamg_type_name, tname, sizeof(tname), &flag);CHKERRQ(ierr); 1347 if (flag) { 1348 ierr = PCGAMGSetType(pc,tname);CHKERRQ(ierr); 1349 } 1350 } 1351 /* -pc_gamg_verbose */ 1352 ierr = PetscOptionsInt("-pc_gamg_verbose","Verbose (debugging) output for PCGAMG","none", pc_gamg->verbose,&pc_gamg->verbose, NULL);CHKERRQ(ierr); 1353 /* -pc_gamg_repartition */ 1354 ierr = PetscOptionsBool("-pc_gamg_repartition","Repartion coarse grids","PCGAMGRepartitioning",pc_gamg->repart,&pc_gamg->repart,NULL);CHKERRQ(ierr); 1355 /* -pc_gamg_reuse_interpolation */ 1356 ierr = PetscOptionsBool("-pc_gamg_reuse_interpolation","Reuse prolongation operator","PCGAMGReuseInterpolation",pc_gamg->reuse_prol,&pc_gamg->reuse_prol,NULL);CHKERRQ(ierr); 1357 /* -pc_gamg_use_agg_gasm */ 1358 ierr = PetscOptionsBool("-pc_gamg_use_agg_gasm","Use aggregation agragates for GASM smoother","PCGAMGUseASMAggs",pc_gamg->use_aggs_in_gasm,&pc_gamg->use_aggs_in_gasm,NULL);CHKERRQ(ierr); 1359 /* -pc_gamg_process_eq_limit */ 1360 ierr = PetscOptionsInt("-pc_gamg_process_eq_limit","Limit (goal) on number of equations per process on 
coarse grids","PCGAMGSetProcEqLim",pc_gamg->min_eq_proc,&pc_gamg->min_eq_proc,NULL);CHKERRQ(ierr); 1361 /* -pc_gamg_coarse_eq_limit */ 1362 ierr = PetscOptionsInt("-pc_gamg_coarse_eq_limit","Limit on number of equations for the coarse grid","PCGAMGSetCoarseEqLim",pc_gamg->coarse_eq_limit,&pc_gamg->coarse_eq_limit,NULL);CHKERRQ(ierr); 1363 /* -pc_gamg_threshold */ 1364 ierr = PetscOptionsReal("-pc_gamg_threshold","Relative threshold to use for dropping edges in aggregation graph","PCGAMGSetThreshold",pc_gamg->threshold,&pc_gamg->threshold,&flag);CHKERRQ(ierr); 1365 if (flag && pc_gamg->verbose) { 1366 ierr = PetscPrintf(comm,"\t[%d]%s threshold set %e\n",0,__FUNCT__,pc_gamg->threshold);CHKERRQ(ierr); 1367 } 1368 /* -pc_gamg_eigtarget */ 1369 ierr = PetscOptionsRealArray("-pc_gamg_eigtarget","Target eigenvalue range as fraction of estimated maximum eigenvalue","PCGAMGSetEigTarget",pc_gamg->eigtarget,&two,NULL);CHKERRQ(ierr); 1370 ierr = PetscOptionsInt("-pc_mg_levels","Set number of MG levels","PCGAMGSetNlevels",pc_gamg->Nlevels,&pc_gamg->Nlevels,NULL);CHKERRQ(ierr); 1371 1372 /* set options for subtype */ 1373 if (pc_gamg->ops->setfromoptions) {ierr = (*pc_gamg->ops->setfromoptions)(PetscOptionsObject,pc);CHKERRQ(ierr);} 1374 } 1375 ierr = PetscOptionsTail();CHKERRQ(ierr); 1376 PetscFunctionReturn(0); 1377 } 1378 1379 /* -------------------------------------------------------------------------- */ 1380 /*MC 1381 PCGAMG - Geometric algebraic multigrid (AMG) preconditioner 1382 1383 Options Database Keys: 1384 Multigrid options(inherited) 1385 + -pc_mg_cycles <v>: v or w (PCMGSetCycleType()) 1386 . -pc_mg_smoothup <1>: Number of post-smoothing steps (PCMGSetNumberSmoothUp) 1387 . 
-pc_mg_smoothdown <1>: Number of pre-smoothing steps (PCMGSetNumberSmoothDown) 1388 - -pc_mg_type <multiplicative>: (one of) additive multiplicative full kascade 1389 1390 1391 Notes: In order to obtain good performance for PCGAMG for vector valued problems you must 1392 $ Call MatSetBlockSize() to indicate the number of degrees of freedom per grid point 1393 $ Call MatSetNearNullSpace() (or PCSetCoordinates() if solving the equations of elasticity) to indicate the near null space of the operator 1394 $ See the Users Manual Chapter 4 for more details 1395 1396 Level: intermediate 1397 1398 Concepts: algebraic multigrid 1399 1400 .seealso: PCCreate(), PCSetType(), MatSetBlockSize(), PCMGType, PCSetCoordinates(), MatSetNearNullSpace(), PCGAMGSetType(), PCGAMGAGG, PCGAMGGEO, PCGAMGCLASSICAL, PCGAMGSetProcEqLim(), 1401 PCGAMGSetCoarseEqLim(), PCGAMGSetRepartitioning(), PCGAMGRegister(), PCGAMGSetReuseInterpolation(), PCGAMGSetUseASMAggs(), PCGAMGSetNlevels(), PCGAMGSetThreshold(), PCGAMGGetType() 1402 M*/ 1403 1404 #undef __FUNCT__ 1405 #define __FUNCT__ "PCCreate_GAMG" 1406 PETSC_EXTERN PetscErrorCode PCCreate_GAMG(PC pc) 1407 { 1408 PetscErrorCode ierr; 1409 PC_GAMG *pc_gamg; 1410 PC_MG *mg; 1411 #if defined PETSC_GAMG_USE_LOG 1412 static long count = 0; 1413 #endif 1414 1415 PetscFunctionBegin; 1416 /* PCGAMG is an inherited class of PCMG. 
Initialize pc as PCMG */ 1417 ierr = PCSetType(pc, PCMG);CHKERRQ(ierr); /* calls PCCreate_MG() and MGCreate_Private() */ 1418 ierr = PetscObjectChangeTypeName((PetscObject)pc, PCGAMG);CHKERRQ(ierr); 1419 1420 /* create a supporting struct and attach it to pc */ 1421 ierr = PetscNewLog(pc,&pc_gamg);CHKERRQ(ierr); 1422 mg = (PC_MG*)pc->data; 1423 mg->galerkin = 2; /* Use Galerkin, but it is computed externally */ 1424 mg->innerctx = pc_gamg; 1425 1426 ierr = PetscNewLog(pc,&pc_gamg->ops);CHKERRQ(ierr); 1427 1428 pc_gamg->setup_count = 0; 1429 /* these should be in subctx but repartitioning needs simple arrays */ 1430 pc_gamg->data_sz = 0; 1431 pc_gamg->data = 0; 1432 1433 /* register AMG type */ 1434 ierr = PCGAMGInitializePackage();CHKERRQ(ierr); 1435 1436 /* overwrite the pointers of PCMG by the functions of base class PCGAMG */ 1437 pc->ops->setfromoptions = PCSetFromOptions_GAMG; 1438 pc->ops->setup = PCSetUp_GAMG; 1439 pc->ops->reset = PCReset_GAMG; 1440 pc->ops->destroy = PCDestroy_GAMG; 1441 1442 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetProcEqLim_C",PCGAMGSetProcEqLim_GAMG);CHKERRQ(ierr); 1443 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetCoarseEqLim_C",PCGAMGSetCoarseEqLim_GAMG);CHKERRQ(ierr); 1444 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetRepartitioning_C",PCGAMGSetRepartitioning_GAMG);CHKERRQ(ierr); 1445 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetReuseInterpolation_C",PCGAMGSetReuseInterpolation_GAMG);CHKERRQ(ierr); 1446 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetUseASMAggs_C",PCGAMGSetUseASMAggs_GAMG);CHKERRQ(ierr); 1447 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetThreshold_C",PCGAMGSetThreshold_GAMG);CHKERRQ(ierr); 1448 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetType_C",PCGAMGSetType_GAMG);CHKERRQ(ierr); 1449 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGGetType_C",PCGAMGGetType_GAMG);CHKERRQ(ierr); 1450 ierr = 
PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetNlevels_C",PCGAMGSetNlevels_GAMG);CHKERRQ(ierr); 1451 pc_gamg->repart = PETSC_FALSE; 1452 pc_gamg->reuse_prol = PETSC_FALSE; 1453 pc_gamg->use_aggs_in_gasm = PETSC_FALSE; 1454 pc_gamg->min_eq_proc = 50; 1455 pc_gamg->coarse_eq_limit = 50; 1456 pc_gamg->threshold = 0.; 1457 pc_gamg->Nlevels = GAMG_MAXLEVELS; 1458 pc_gamg->verbose = 0; 1459 pc_gamg->emax_id = -1; 1460 pc_gamg->firstCoarsen = PETSC_FALSE; 1461 pc_gamg->eigtarget[0] = 0.05; 1462 pc_gamg->eigtarget[1] = 1.05; 1463 pc_gamg->ops->createlevel = PCGAMGCreateLevel_GAMG; 1464 1465 /* private events */ 1466 #if defined PETSC_GAMG_USE_LOG 1467 if (count++ == 0) { 1468 ierr = PetscLogEventRegister("GAMG: createProl", PC_CLASSID, &petsc_gamg_setup_events[SET1]);CHKERRQ(ierr); 1469 ierr = PetscLogEventRegister(" Graph", PC_CLASSID, &petsc_gamg_setup_events[GRAPH]);CHKERRQ(ierr); 1470 /* PetscLogEventRegister(" G.Mat", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_MAT]); */ 1471 /* PetscLogEventRegister(" G.Filter", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_FILTER]); */ 1472 /* PetscLogEventRegister(" G.Square", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_SQR]); */ 1473 ierr = PetscLogEventRegister(" MIS/Agg", PC_CLASSID, &petsc_gamg_setup_events[SET4]);CHKERRQ(ierr); 1474 ierr = PetscLogEventRegister(" geo: growSupp", PC_CLASSID, &petsc_gamg_setup_events[SET5]);CHKERRQ(ierr); 1475 ierr = PetscLogEventRegister(" geo: triangle", PC_CLASSID, &petsc_gamg_setup_events[SET6]);CHKERRQ(ierr); 1476 ierr = PetscLogEventRegister(" search&set", PC_CLASSID, &petsc_gamg_setup_events[FIND_V]);CHKERRQ(ierr); 1477 ierr = PetscLogEventRegister(" SA: col data", PC_CLASSID, &petsc_gamg_setup_events[SET7]);CHKERRQ(ierr); 1478 ierr = PetscLogEventRegister(" SA: frmProl0", PC_CLASSID, &petsc_gamg_setup_events[SET8]);CHKERRQ(ierr); 1479 ierr = PetscLogEventRegister(" SA: smooth", PC_CLASSID, &petsc_gamg_setup_events[SET9]);CHKERRQ(ierr); 1480 ierr = PetscLogEventRegister("GAMG: 
partLevel", PC_CLASSID, &petsc_gamg_setup_events[SET2]);CHKERRQ(ierr); 1481 ierr = PetscLogEventRegister(" repartition", PC_CLASSID, &petsc_gamg_setup_events[SET12]);CHKERRQ(ierr); 1482 ierr = PetscLogEventRegister(" Invert-Sort", PC_CLASSID, &petsc_gamg_setup_events[SET13]);CHKERRQ(ierr); 1483 ierr = PetscLogEventRegister(" Move A", PC_CLASSID, &petsc_gamg_setup_events[SET14]);CHKERRQ(ierr); 1484 ierr = PetscLogEventRegister(" Move P", PC_CLASSID, &petsc_gamg_setup_events[SET15]);CHKERRQ(ierr); 1485 1486 /* PetscLogEventRegister(" PL move data", PC_CLASSID, &petsc_gamg_setup_events[SET13]); */ 1487 /* PetscLogEventRegister("GAMG: fix", PC_CLASSID, &petsc_gamg_setup_events[SET10]); */ 1488 /* PetscLogEventRegister("GAMG: set levels", PC_CLASSID, &petsc_gamg_setup_events[SET11]); */ 1489 /* create timer stages */ 1490 #if defined GAMG_STAGES 1491 { 1492 char str[32]; 1493 PetscInt lidx; 1494 sprintf(str,"MG Level %d (finest)",0); 1495 ierr = PetscLogStageRegister(str, &gamg_stages[0]);CHKERRQ(ierr); 1496 for (lidx=1; lidx<9; lidx++) { 1497 sprintf(str,"MG Level %d",lidx); 1498 ierr = PetscLogStageRegister(str, &gamg_stages[lidx]);CHKERRQ(ierr); 1499 } 1500 } 1501 #endif 1502 } 1503 #endif 1504 /* PCSetUp_GAMG assumes that the type has been set, so set it to the default now */ 1505 ierr = PCGAMGSetType(pc,PCGAMGAGG);CHKERRQ(ierr); 1506 PetscFunctionReturn(0); 1507 } 1508 1509 #undef __FUNCT__ 1510 #define __FUNCT__ "PCGAMGInitializePackage" 1511 /*@C 1512 PCGAMGInitializePackage - This function initializes everything in the PCGAMG package. It is called 1513 from PetscDLLibraryRegister() when using dynamic libraries, and on the first call to PCCreate_GAMG() 1514 when using static libraries. 
1515 1516 Level: developer 1517 1518 .keywords: PC, PCGAMG, initialize, package 1519 .seealso: PetscInitialize() 1520 @*/ 1521 PetscErrorCode PCGAMGInitializePackage(void) 1522 { 1523 PetscErrorCode ierr; 1524 1525 PetscFunctionBegin; 1526 if (PCGAMGPackageInitialized) PetscFunctionReturn(0); 1527 PCGAMGPackageInitialized = PETSC_TRUE; 1528 ierr = PetscFunctionListAdd(&GAMGList,PCGAMGGEO,PCCreateGAMG_GEO);CHKERRQ(ierr); 1529 ierr = PetscFunctionListAdd(&GAMGList,PCGAMGAGG,PCCreateGAMG_AGG);CHKERRQ(ierr); 1530 ierr = PetscFunctionListAdd(&GAMGList,PCGAMGCLASSICAL,PCCreateGAMG_Classical);CHKERRQ(ierr); 1531 ierr = PetscRegisterFinalize(PCGAMGFinalizePackage);CHKERRQ(ierr); 1532 1533 /* general events */ 1534 #if defined PETSC_USE_LOG 1535 ierr = PetscLogEventRegister("PCGAMGgraph_AGG", 0, &PC_GAMGGgraph_AGG);CHKERRQ(ierr); 1536 ierr = PetscLogEventRegister("PCGAMGgraph_GEO", PC_CLASSID, &PC_GAMGGgraph_GEO);CHKERRQ(ierr); 1537 ierr = PetscLogEventRegister("PCGAMGcoarse_AGG", PC_CLASSID, &PC_GAMGCoarsen_AGG);CHKERRQ(ierr); 1538 ierr = PetscLogEventRegister("PCGAMGcoarse_GEO", PC_CLASSID, &PC_GAMGCoarsen_GEO);CHKERRQ(ierr); 1539 ierr = PetscLogEventRegister("PCGAMGProl_AGG", PC_CLASSID, &PC_GAMGProlongator_AGG);CHKERRQ(ierr); 1540 ierr = PetscLogEventRegister("PCGAMGProl_GEO", PC_CLASSID, &PC_GAMGProlongator_GEO);CHKERRQ(ierr); 1541 ierr = PetscLogEventRegister("PCGAMGPOpt_AGG", PC_CLASSID, &PC_GAMGOptprol_AGG);CHKERRQ(ierr); 1542 #endif 1543 1544 PetscFunctionReturn(0); 1545 } 1546 1547 #undef __FUNCT__ 1548 #define __FUNCT__ "PCGAMGFinalizePackage" 1549 /*@C 1550 PCGAMGFinalizePackage - This function destroys everything in the PCGAMG package. It is 1551 called from PetscFinalize(). 
1552 1553 Level: developer 1554 1555 .keywords: Petsc, destroy, package 1556 .seealso: PetscFinalize() 1557 @*/ 1558 PetscErrorCode PCGAMGFinalizePackage(void) 1559 { 1560 PetscErrorCode ierr; 1561 1562 PetscFunctionBegin; 1563 PCGAMGPackageInitialized = PETSC_FALSE; 1564 ierr = PetscFunctionListDestroy(&GAMGList);CHKERRQ(ierr); 1565 PetscFunctionReturn(0); 1566 } 1567 1568 #undef __FUNCT__ 1569 #define __FUNCT__ "PCGAMGRegister" 1570 /*@C 1571 PCGAMGRegister - Register a PCGAMG implementation. 1572 1573 Input Parameters: 1574 + type - string that will be used as the name of the GAMG type. 1575 - create - function for creating the gamg context. 1576 1577 Level: advanced 1578 1579 .seealso: PCGAMGGetContext(), PCGAMGSetContext() 1580 @*/ 1581 PetscErrorCode PCGAMGRegister(PCGAMGType type, PetscErrorCode (*create)(PC)) 1582 { 1583 PetscErrorCode ierr; 1584 1585 PetscFunctionBegin; 1586 ierr = PCGAMGInitializePackage();CHKERRQ(ierr); 1587 ierr = PetscFunctionListAdd(&GAMGList,type,create);CHKERRQ(ierr); 1588 PetscFunctionReturn(0); 1589 } 1590 1591