/*
 GAMG geometric-algebraic multigrid PC - Mark Adams 2011
 */
#include <petsc-private/matimpl.h>
#include <../src/ksp/pc/impls/gamg/gamg.h>           /*I "petscpc.h" I*/
#include <petsc-private/kspimpl.h>
#include <../src/ksp/pc/impls/bjacobi/bjacobi.h> /* Hack to access same_local_solves */

#if defined PETSC_GAMG_USE_LOG
/* fine-grained timing events for the GAMG setup phases (indexed by the SET* enum) */
PetscLogEvent petsc_gamg_setup_events[NUM_SET];
#endif

#if defined PETSC_USE_LOG
/* per-subtype logging events: graph construction, coarsening, prolongator build/opt */
PetscLogEvent PC_GAMGGraph_AGG;
PetscLogEvent PC_GAMGGraph_GEO;
PetscLogEvent PC_GAMGCoarsen_AGG;
PetscLogEvent PC_GAMGCoarsen_GEO;
PetscLogEvent PC_GAMGProlongator_AGG;
PetscLogEvent PC_GAMGProlongator_GEO;
PetscLogEvent PC_GAMGOptProlongator_AGG;
#endif

/* hard upper bound on the number of multigrid levels GAMG will build */
#define GAMG_MAXLEVELS 30

/* #define GAMG_STAGES */
#if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES)
static PetscLogStage gamg_stages[GAMG_MAXLEVELS];
#endif

/* registry of GAMG subtype constructors, and the one-shot package-init flag */
static PetscFunctionList GAMGList = 0;
static PetscBool PCGAMGPackageInitialized;

/* ----------------------------------------------------------------------------- */
#undef __FUNCT__
#define __FUNCT__ "PCReset_GAMG"
/*
   PCReset_GAMG - Release the coarsening data held by the GAMG context.

   Input Parameter:
   .  pc - the preconditioner context

   Notes: 'data' is normally already freed at the end of SetUp, so finding it
   non-NULL here is unexpected (a warning is printed).  'orig_data' is the
   cached copy of the user's original data kept for prolongator reuse.
*/
PetscErrorCode PCReset_GAMG(PC pc)
{
  PetscErrorCode ierr;
  PC_MG          *mg      = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  if (pc_gamg->data) { /* this should not happen, cleaned up in SetUp */
    PetscPrintf(PetscObjectComm((PetscObject)pc),"***[%d]%s this should not happen, cleaned up in SetUp\n",0,__FUNCT__);
    ierr = PetscFree(pc_gamg->data);CHKERRQ(ierr);
  }
  pc_gamg->data = NULL; pc_gamg->data_sz = 0;

  if (pc_gamg->orig_data) {
    ierr = PetscFree(pc_gamg->orig_data);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------- */
/*
   PCGAMGCreateLevel_GAMG: create coarse op with RAP.  repartition and/or reduce number
     of active processors.

   Input Parameter:
   .
pc - parameters + side effect: coarse data in 'pc_gamg->data' and 62 'pc_gamg->data_sz' are changed via repartitioning/reduction. 63 . Amat_fine - matrix on this fine (k) level 64 . cr_bs - coarse block size 65 In/Output Parameter: 66 . a_P_inout - prolongation operator to the next level (k-->k-1) 67 . a_nactive_proc - number of active procs 68 Output Parameter: 69 . a_Amat_crs - coarse matrix that is created (k-1) 70 */ 71 72 #undef __FUNCT__ 73 #define __FUNCT__ "PCGAMGCreateLevel_GAMG" 74 static PetscErrorCode PCGAMGCreateLevel_GAMG(PC pc,Mat Amat_fine,PetscInt cr_bs, 75 Mat *a_P_inout,Mat *a_Amat_crs,PetscMPIInt *a_nactive_proc, 76 IS * Pcolumnperm) 77 { 78 PetscErrorCode ierr; 79 PC_MG *mg = (PC_MG*)pc->data; 80 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 81 Mat Cmat,Pold=*a_P_inout; 82 MPI_Comm comm; 83 PetscMPIInt rank,size,new_size,nactive=*a_nactive_proc; 84 PetscInt ncrs_eq,ncrs,f_bs; 85 86 PetscFunctionBegin; 87 ierr = PetscObjectGetComm((PetscObject)Amat_fine,&comm);CHKERRQ(ierr); 88 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 89 ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr); 90 ierr = MatGetBlockSize(Amat_fine, &f_bs);CHKERRQ(ierr); 91 /* RAP */ 92 ierr = MatPtAP(Amat_fine, Pold, MAT_INITIAL_MATRIX, 2.0, &Cmat);CHKERRQ(ierr); 93 94 /* set 'ncrs' (nodes), 'ncrs_eq' (equations)*/ 95 ierr = MatGetLocalSize(Cmat, &ncrs_eq, NULL);CHKERRQ(ierr); 96 if (pc_gamg->data_cell_rows>0) { 97 ncrs = pc_gamg->data_sz/pc_gamg->data_cell_cols/pc_gamg->data_cell_rows; 98 } else { 99 PetscInt bs; 100 ierr = MatGetBlockSize(Cmat, &bs);CHKERRQ(ierr); 101 ncrs = ncrs_eq/bs; 102 } 103 104 /* get number of PEs to make active 'new_size', reduce, can be any integer 1-P */ 105 { 106 PetscInt ncrs_eq_glob; 107 ierr = MatGetSize(Cmat, &ncrs_eq_glob, NULL);CHKERRQ(ierr); 108 new_size = (PetscMPIInt)((float)ncrs_eq_glob/(float)pc_gamg->min_eq_proc + 0.5); /* hardwire min. number of eq/proc */ 109 if (new_size == 0) new_size = 1; /* not likely, posible? 
*/ 110 else if (new_size >= nactive) new_size = nactive; /* no change, rare */ 111 } 112 113 if (Pcolumnperm) *Pcolumnperm = NULL; 114 115 if (!pc_gamg->repart && new_size==nactive) *a_Amat_crs = Cmat; /* output - no repartitioning or reduction - could bail here */ 116 else { 117 PetscInt *counts,*newproc_idx,ii,jj,kk,strideNew,*tidx,ncrs_new,ncrs_eq_new,nloc_old; 118 IS is_eq_newproc,is_eq_num,is_eq_num_prim,new_eq_indices; 119 120 nloc_old = ncrs_eq/cr_bs; 121 if (ncrs_eq % cr_bs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"ncrs_eq %D not divisible by cr_bs %D",ncrs_eq,cr_bs); 122 #if defined PETSC_GAMG_USE_LOG 123 ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET12],0,0,0,0);CHKERRQ(ierr); 124 #endif 125 /* make 'is_eq_newproc' */ 126 ierr = PetscMalloc1(size, &counts);CHKERRQ(ierr); 127 if (pc_gamg->repart) { 128 /* Repartition Cmat_{k} and move colums of P^{k}_{k-1} and coordinates of primal part accordingly */ 129 Mat adj; 130 131 if (pc_gamg->verbose>0) { 132 if (pc_gamg->verbose==1) PetscPrintf(comm,"\t[%d]%s repartition: size (active): %d --> %d, neq = %d\n",rank,__FUNCT__,*a_nactive_proc,new_size,ncrs_eq); 133 else { 134 PetscInt n; 135 ierr = MPI_Allreduce(&ncrs_eq, &n, 1, MPIU_INT, MPI_SUM, comm);CHKERRQ(ierr); 136 PetscPrintf(comm,"\t[%d]%s repartition: size (active): %d --> %d, neq = %d\n",rank,__FUNCT__,*a_nactive_proc,new_size,n); 137 } 138 } 139 140 /* get 'adj' */ 141 if (cr_bs == 1) { 142 ierr = MatConvert(Cmat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj);CHKERRQ(ierr); 143 } else { 144 /* make a scalar matrix to partition (no Stokes here) */ 145 Mat tMat; 146 PetscInt Istart_crs,Iend_crs,ncols,jj,Ii; 147 const PetscScalar *vals; 148 const PetscInt *idx; 149 PetscInt *d_nnz, *o_nnz, M, N; 150 static PetscInt llev = 0; 151 152 ierr = PetscMalloc1(ncrs, &d_nnz);CHKERRQ(ierr); 153 ierr = PetscMalloc1(ncrs, &o_nnz);CHKERRQ(ierr); 154 ierr = MatGetOwnershipRange(Cmat, &Istart_crs, &Iend_crs);CHKERRQ(ierr); 155 ierr = MatGetSize(Cmat, &M, &N);CHKERRQ(ierr); 
156 for (Ii = Istart_crs, jj = 0; Ii < Iend_crs; Ii += cr_bs, jj++) { 157 ierr = MatGetRow(Cmat,Ii,&ncols,0,0);CHKERRQ(ierr); 158 d_nnz[jj] = ncols/cr_bs; 159 o_nnz[jj] = ncols/cr_bs; 160 ierr = MatRestoreRow(Cmat,Ii,&ncols,0,0);CHKERRQ(ierr); 161 if (d_nnz[jj] > ncrs) d_nnz[jj] = ncrs; 162 if (o_nnz[jj] > (M/cr_bs-ncrs)) o_nnz[jj] = M/cr_bs-ncrs; 163 } 164 165 ierr = MatCreate(comm, &tMat);CHKERRQ(ierr); 166 ierr = MatSetSizes(tMat, ncrs, ncrs,PETSC_DETERMINE, PETSC_DETERMINE);CHKERRQ(ierr); 167 ierr = MatSetType(tMat,MATAIJ);CHKERRQ(ierr); 168 ierr = MatSeqAIJSetPreallocation(tMat,0,d_nnz);CHKERRQ(ierr); 169 ierr = MatMPIAIJSetPreallocation(tMat,0,d_nnz,0,o_nnz);CHKERRQ(ierr); 170 ierr = PetscFree(d_nnz);CHKERRQ(ierr); 171 ierr = PetscFree(o_nnz);CHKERRQ(ierr); 172 173 for (ii = Istart_crs; ii < Iend_crs; ii++) { 174 PetscInt dest_row = ii/cr_bs; 175 ierr = MatGetRow(Cmat,ii,&ncols,&idx,&vals);CHKERRQ(ierr); 176 for (jj = 0; jj < ncols; jj++) { 177 PetscInt dest_col = idx[jj]/cr_bs; 178 PetscScalar v = 1.0; 179 ierr = MatSetValues(tMat,1,&dest_row,1,&dest_col,&v,ADD_VALUES);CHKERRQ(ierr); 180 } 181 ierr = MatRestoreRow(Cmat,ii,&ncols,&idx,&vals);CHKERRQ(ierr); 182 } 183 ierr = MatAssemblyBegin(tMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 184 ierr = MatAssemblyEnd(tMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 185 186 if (llev++ == -1) { 187 PetscViewer viewer; char fname[32]; 188 ierr = PetscSNPrintf(fname,sizeof(fname),"part_mat_%D.mat",llev);CHKERRQ(ierr); 189 PetscViewerBinaryOpen(comm,fname,FILE_MODE_WRITE,&viewer); 190 ierr = MatView(tMat, viewer);CHKERRQ(ierr); 191 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr); 192 } 193 194 ierr = MatConvert(tMat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj);CHKERRQ(ierr); 195 196 ierr = MatDestroy(&tMat);CHKERRQ(ierr); 197 } /* create 'adj' */ 198 199 { /* partition: get newproc_idx */ 200 char prefix[256]; 201 const char *pcpre; 202 const PetscInt *is_idx; 203 MatPartitioning mpart; 204 IS proc_is; 205 PetscInt targetPE; 206 207 ierr = 
MatPartitioningCreate(comm, &mpart);CHKERRQ(ierr); 208 ierr = MatPartitioningSetAdjacency(mpart, adj);CHKERRQ(ierr); 209 ierr = PCGetOptionsPrefix(pc, &pcpre);CHKERRQ(ierr); 210 ierr = PetscSNPrintf(prefix,sizeof(prefix),"%spc_gamg_",pcpre ? pcpre : "");CHKERRQ(ierr); 211 ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr); 212 ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr); 213 ierr = MatPartitioningSetNParts(mpart, new_size);CHKERRQ(ierr); 214 ierr = MatPartitioningApply(mpart, &proc_is);CHKERRQ(ierr); 215 ierr = MatPartitioningDestroy(&mpart);CHKERRQ(ierr); 216 217 /* collect IS info */ 218 ierr = PetscMalloc1(ncrs_eq, &newproc_idx);CHKERRQ(ierr); 219 ierr = ISGetIndices(proc_is, &is_idx);CHKERRQ(ierr); 220 targetPE = 1; /* bring to "front" of machine */ 221 /*targetPE = size/new_size;*/ /* spread partitioning across machine */ 222 for (kk = jj = 0 ; kk < nloc_old ; kk++) { 223 for (ii = 0 ; ii < cr_bs ; ii++, jj++) { 224 newproc_idx[jj] = is_idx[kk] * targetPE; /* distribution */ 225 } 226 } 227 ierr = ISRestoreIndices(proc_is, &is_idx);CHKERRQ(ierr); 228 ierr = ISDestroy(&proc_is);CHKERRQ(ierr); 229 } 230 ierr = MatDestroy(&adj);CHKERRQ(ierr); 231 232 ierr = ISCreateGeneral(comm, ncrs_eq, newproc_idx, PETSC_COPY_VALUES, &is_eq_newproc);CHKERRQ(ierr); 233 if (newproc_idx != 0) { 234 ierr = PetscFree(newproc_idx);CHKERRQ(ierr); 235 } 236 } else { /* simple aggreagtion of parts -- 'is_eq_newproc' */ 237 238 PetscInt rfactor,targetPE; 239 /* find factor */ 240 if (new_size == 1) rfactor = size; /* easy */ 241 else { 242 PetscReal best_fact = 0.; 243 jj = -1; 244 for (kk = 1 ; kk <= size ; kk++) { 245 if (size%kk==0) { /* a candidate */ 246 PetscReal nactpe = (PetscReal)size/(PetscReal)kk, fact = nactpe/(PetscReal)new_size; 247 if (fact > 1.0) fact = 1./fact; /* keep fact < 1 */ 248 if (fact > best_fact) { 249 best_fact = fact; jj = kk; 250 } 251 } 252 } 253 if (jj != -1) rfactor = jj; 254 else rfactor = 1; /* does this happen .. 
a prime */ 255 } 256 new_size = size/rfactor; 257 258 if (new_size==nactive) { 259 *a_Amat_crs = Cmat; /* output - no repartitioning or reduction, bail out because nested here */ 260 ierr = PetscFree(counts);CHKERRQ(ierr); 261 if (pc_gamg->verbose>0) { 262 PetscPrintf(comm,"\t[%d]%s aggregate processors noop: new_size=%d, neq(loc)=%d\n",rank,__FUNCT__,new_size,ncrs_eq); 263 } 264 PetscFunctionReturn(0); 265 } 266 267 if (pc_gamg->verbose) PetscPrintf(comm,"\t[%d]%s number of equations (loc) %d with simple aggregation\n",rank,__FUNCT__,ncrs_eq); 268 targetPE = rank/rfactor; 269 ierr = ISCreateStride(comm, ncrs_eq, targetPE, 0, &is_eq_newproc);CHKERRQ(ierr); 270 } /* end simple 'is_eq_newproc' */ 271 272 /* 273 Create an index set from the is_eq_newproc index set to indicate the mapping TO 274 */ 275 ierr = ISPartitioningToNumbering(is_eq_newproc, &is_eq_num);CHKERRQ(ierr); 276 is_eq_num_prim = is_eq_num; 277 /* 278 Determine how many equations/vertices are assigned to each processor 279 */ 280 ierr = ISPartitioningCount(is_eq_newproc, size, counts);CHKERRQ(ierr); 281 ncrs_eq_new = counts[rank]; 282 ierr = ISDestroy(&is_eq_newproc);CHKERRQ(ierr); 283 ncrs_new = ncrs_eq_new/cr_bs; /* eqs */ 284 285 ierr = PetscFree(counts);CHKERRQ(ierr); 286 #if defined PETSC_GAMG_USE_LOG 287 ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET12],0,0,0,0);CHKERRQ(ierr); 288 #endif 289 /* data movement scope -- this could be moved to subclasses so that we don't try to cram all auxilary data into some complex abstracted thing */ 290 { 291 Vec src_crd, dest_crd; 292 const PetscInt *idx,ndata_rows=pc_gamg->data_cell_rows,ndata_cols=pc_gamg->data_cell_cols,node_data_sz=ndata_rows*ndata_cols; 293 VecScatter vecscat; 294 PetscScalar *array; 295 IS isscat; 296 297 /* move data (for primal equations only) */ 298 /* Create a vector to contain the newly ordered element information */ 299 ierr = VecCreate(comm, &dest_crd);CHKERRQ(ierr); 300 ierr = VecSetSizes(dest_crd, node_data_sz*ncrs_new, 
PETSC_DECIDE);CHKERRQ(ierr); 301 ierr = VecSetType(dest_crd,VECSTANDARD);CHKERRQ(ierr); /* this is needed! */ 302 /* 303 There are 'ndata_rows*ndata_cols' data items per node, (one can think of the vectors of having 304 a block size of ...). Note, ISs are expanded into equation space by 'cr_bs'. 305 */ 306 ierr = PetscMalloc1(ncrs*node_data_sz, &tidx);CHKERRQ(ierr); 307 ierr = ISGetIndices(is_eq_num_prim, &idx);CHKERRQ(ierr); 308 for (ii=0,jj=0; ii<ncrs; ii++) { 309 PetscInt id = idx[ii*cr_bs]/cr_bs; /* get node back */ 310 for (kk=0; kk<node_data_sz; kk++, jj++) tidx[jj] = id*node_data_sz + kk; 311 } 312 ierr = ISRestoreIndices(is_eq_num_prim, &idx);CHKERRQ(ierr); 313 ierr = ISCreateGeneral(comm, node_data_sz*ncrs, tidx, PETSC_COPY_VALUES, &isscat);CHKERRQ(ierr); 314 ierr = PetscFree(tidx);CHKERRQ(ierr); 315 /* 316 Create a vector to contain the original vertex information for each element 317 */ 318 ierr = VecCreateSeq(PETSC_COMM_SELF, node_data_sz*ncrs, &src_crd);CHKERRQ(ierr); 319 for (jj=0; jj<ndata_cols; jj++) { 320 const PetscInt stride0=ncrs*pc_gamg->data_cell_rows; 321 for (ii=0; ii<ncrs; ii++) { 322 for (kk=0; kk<ndata_rows; kk++) { 323 PetscInt ix = ii*ndata_rows + kk + jj*stride0, jx = ii*node_data_sz + kk*ndata_cols + jj; 324 PetscScalar tt = (PetscScalar)pc_gamg->data[ix]; 325 ierr = VecSetValues(src_crd, 1, &jx, &tt, INSERT_VALUES);CHKERRQ(ierr); 326 } 327 } 328 } 329 ierr = VecAssemblyBegin(src_crd);CHKERRQ(ierr); 330 ierr = VecAssemblyEnd(src_crd);CHKERRQ(ierr); 331 /* 332 Scatter the element vertex information (still in the original vertex ordering) 333 to the correct processor 334 */ 335 ierr = VecScatterCreate(src_crd, NULL, dest_crd, isscat, &vecscat);CHKERRQ(ierr); 336 ierr = ISDestroy(&isscat);CHKERRQ(ierr); 337 ierr = VecScatterBegin(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 338 ierr = VecScatterEnd(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 339 ierr = 
VecScatterDestroy(&vecscat);CHKERRQ(ierr); 340 ierr = VecDestroy(&src_crd);CHKERRQ(ierr); 341 /* 342 Put the element vertex data into a new allocation of the gdata->ele 343 */ 344 ierr = PetscFree(pc_gamg->data);CHKERRQ(ierr); 345 ierr = PetscMalloc1(node_data_sz*ncrs_new, &pc_gamg->data);CHKERRQ(ierr); 346 347 pc_gamg->data_sz = node_data_sz*ncrs_new; 348 strideNew = ncrs_new*ndata_rows; 349 350 ierr = VecGetArray(dest_crd, &array);CHKERRQ(ierr); 351 for (jj=0; jj<ndata_cols; jj++) { 352 for (ii=0; ii<ncrs_new; ii++) { 353 for (kk=0; kk<ndata_rows; kk++) { 354 PetscInt ix = ii*ndata_rows + kk + jj*strideNew, jx = ii*node_data_sz + kk*ndata_cols + jj; 355 pc_gamg->data[ix] = PetscRealPart(array[jx]); 356 } 357 } 358 } 359 ierr = VecRestoreArray(dest_crd, &array);CHKERRQ(ierr); 360 ierr = VecDestroy(&dest_crd);CHKERRQ(ierr); 361 } 362 /* move A and P (columns) with new layout */ 363 #if defined PETSC_GAMG_USE_LOG 364 ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET13],0,0,0,0);CHKERRQ(ierr); 365 #endif 366 367 /* 368 Invert for MatGetSubMatrix 369 */ 370 ierr = ISInvertPermutation(is_eq_num, ncrs_eq_new, &new_eq_indices);CHKERRQ(ierr); 371 ierr = ISSort(new_eq_indices);CHKERRQ(ierr); /* is this needed? 
*/ 372 ierr = ISSetBlockSize(new_eq_indices, cr_bs);CHKERRQ(ierr); 373 if (is_eq_num != is_eq_num_prim) { 374 ierr = ISDestroy(&is_eq_num_prim);CHKERRQ(ierr); /* could be same as 'is_eq_num' */ 375 } 376 if (Pcolumnperm) { 377 ierr = PetscObjectReference((PetscObject)new_eq_indices);CHKERRQ(ierr); 378 *Pcolumnperm = new_eq_indices; 379 } 380 ierr = ISDestroy(&is_eq_num);CHKERRQ(ierr); 381 #if defined PETSC_GAMG_USE_LOG 382 ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET13],0,0,0,0);CHKERRQ(ierr); 383 ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET14],0,0,0,0);CHKERRQ(ierr); 384 #endif 385 /* 'a_Amat_crs' output */ 386 { 387 Mat mat; 388 ierr = MatGetSubMatrix(Cmat, new_eq_indices, new_eq_indices, MAT_INITIAL_MATRIX, &mat);CHKERRQ(ierr); 389 *a_Amat_crs = mat; 390 391 if (!PETSC_TRUE) { 392 PetscInt cbs, rbs; 393 ierr = MatGetBlockSizes(Cmat, &rbs, &cbs);CHKERRQ(ierr); 394 ierr = PetscPrintf(MPI_COMM_SELF,"[%d]%s Old Mat rbs=%d cbs=%d\n",rank,__FUNCT__,rbs,cbs);CHKERRQ(ierr); 395 ierr = MatGetBlockSizes(mat, &rbs, &cbs);CHKERRQ(ierr); 396 ierr = PetscPrintf(MPI_COMM_SELF,"[%d]%s New Mat rbs=%d cbs=%d cr_bs=%d\n",rank,__FUNCT__,rbs,cbs,cr_bs);CHKERRQ(ierr); 397 } 398 } 399 ierr = MatDestroy(&Cmat);CHKERRQ(ierr); 400 401 #if defined PETSC_GAMG_USE_LOG 402 ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET14],0,0,0,0);CHKERRQ(ierr); 403 #endif 404 /* prolongator */ 405 { 406 IS findices; 407 PetscInt Istart,Iend; 408 Mat Pnew; 409 ierr = MatGetOwnershipRange(Pold, &Istart, &Iend);CHKERRQ(ierr); 410 #if defined PETSC_GAMG_USE_LOG 411 ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET15],0,0,0,0);CHKERRQ(ierr); 412 #endif 413 ierr = ISCreateStride(comm,Iend-Istart,Istart,1,&findices);CHKERRQ(ierr); 414 ierr = ISSetBlockSize(findices,f_bs);CHKERRQ(ierr); 415 ierr = MatGetSubMatrix(Pold, findices, new_eq_indices, MAT_INITIAL_MATRIX, &Pnew);CHKERRQ(ierr); 416 ierr = ISDestroy(&findices);CHKERRQ(ierr); 417 418 if (!PETSC_TRUE) { 419 PetscInt cbs, rbs; 420 ierr 
= MatGetBlockSizes(Pold, &rbs, &cbs);CHKERRQ(ierr); 421 ierr = PetscPrintf(MPI_COMM_SELF,"[%d]%s Pold rbs=%d cbs=%d\n",rank,__FUNCT__,rbs,cbs);CHKERRQ(ierr); 422 ierr = MatGetBlockSizes(Pnew, &rbs, &cbs);CHKERRQ(ierr); 423 ierr = PetscPrintf(MPI_COMM_SELF,"[%d]%s Pnew rbs=%d cbs=%d\n",rank,__FUNCT__,rbs,cbs);CHKERRQ(ierr); 424 } 425 #if defined PETSC_GAMG_USE_LOG 426 ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET15],0,0,0,0);CHKERRQ(ierr); 427 #endif 428 ierr = MatDestroy(a_P_inout);CHKERRQ(ierr); 429 430 /* output - repartitioned */ 431 *a_P_inout = Pnew; 432 } 433 ierr = ISDestroy(&new_eq_indices);CHKERRQ(ierr); 434 435 *a_nactive_proc = new_size; /* output */ 436 } 437 438 /* outout matrix data */ 439 if (!PETSC_TRUE) { 440 PetscViewer viewer; char fname[32]; static int llev=0; Cmat = *a_Amat_crs; 441 if (llev==0) { 442 sprintf(fname,"Cmat_%d.m",llev++); 443 PetscViewerASCIIOpen(comm,fname,&viewer); 444 ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_MATLAB);CHKERRQ(ierr); 445 ierr = MatView(Amat_fine, viewer);CHKERRQ(ierr); 446 ierr = PetscViewerDestroy(&viewer); 447 } 448 sprintf(fname,"Cmat_%d.m",llev++); 449 PetscViewerASCIIOpen(comm,fname,&viewer); 450 ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_MATLAB);CHKERRQ(ierr); 451 ierr = MatView(Cmat, viewer);CHKERRQ(ierr); 452 ierr = PetscViewerDestroy(&viewer); 453 } 454 PetscFunctionReturn(0); 455 } 456 457 /* -------------------------------------------------------------------------- */ 458 /* 459 PCSetUp_GAMG - Prepares for the use of the GAMG preconditioner 460 by setting data structures and options. 461 462 Input Parameter: 463 . pc - the preconditioner context 464 465 Application Interface Routine: PCSetUp() 466 467 Notes: 468 The interface routine PCSetUp() is not usually called directly by 469 the user, but instead is called by PCApply() if necessary. 
470 */ 471 #undef __FUNCT__ 472 #define __FUNCT__ "PCSetUp_GAMG" 473 PetscErrorCode PCSetUp_GAMG(PC pc) 474 { 475 PetscErrorCode ierr; 476 PC_MG *mg = (PC_MG*)pc->data; 477 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 478 Mat Pmat = pc->pmat; 479 PetscInt fine_level,level,level1,bs,M,qq,lidx,nASMBlocksArr[GAMG_MAXLEVELS]; 480 MPI_Comm comm; 481 PetscMPIInt rank,size,nactivepe; 482 Mat Aarr[GAMG_MAXLEVELS],Parr[GAMG_MAXLEVELS]; 483 PetscReal emaxs[GAMG_MAXLEVELS]; 484 IS *ASMLocalIDsArr[GAMG_MAXLEVELS]; 485 PetscLogDouble nnz0=0.,nnztot=0.; 486 MatInfo info; 487 PetscBool redo_mesh_setup = (PetscBool)(!pc_gamg->reuse_prol); 488 489 PetscFunctionBegin; 490 ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr); 491 ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); 492 ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 493 494 if (pc_gamg->verbose>2) PetscPrintf(comm,"[%d]%s pc_gamg->setup_count=%d pc->setupcalled=%d\n",rank,__FUNCT__,pc_gamg->setup_count,pc->setupcalled); 495 496 if (pc_gamg->setup_count++ > 0) { 497 if (redo_mesh_setup) { 498 /* reset everything */ 499 ierr = PCReset_MG(pc);CHKERRQ(ierr); 500 pc->setupcalled = 0; 501 } else { 502 PC_MG_Levels **mglevels = mg->levels; 503 /* just do Galerkin grids */ 504 Mat B,dA,dB; 505 506 if (!pc->setupcalled) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"PCSetUp() has not been called yet"); 507 if (pc_gamg->Nlevels > 1) { 508 /* currently only handle case where mat and pmat are the same on coarser levels */ 509 ierr = KSPGetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,&dA,&dB);CHKERRQ(ierr); 510 /* (re)set to get dirty flag */ 511 ierr = KSPSetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,dA,dB);CHKERRQ(ierr); 512 513 for (level=pc_gamg->Nlevels-2; level>=0; level--) { 514 /* the first time through the matrix structure has changed from repartitioning */ 515 if (pc_gamg->setup_count==2) { 516 ierr = MatPtAP(dB,mglevels[level+1]->interpolate,MAT_INITIAL_MATRIX,1.0,&B);CHKERRQ(ierr); 517 ierr = 
MatDestroy(&mglevels[level]->A);CHKERRQ(ierr); 518 519 mglevels[level]->A = B; 520 } else { 521 ierr = KSPGetOperators(mglevels[level]->smoothd,NULL,&B);CHKERRQ(ierr); 522 ierr = MatPtAP(dB,mglevels[level+1]->interpolate,MAT_REUSE_MATRIX,1.0,&B);CHKERRQ(ierr); 523 } 524 ierr = KSPSetOperators(mglevels[level]->smoothd,B,B);CHKERRQ(ierr); 525 dB = B; 526 } 527 } 528 529 ierr = PCSetUp_MG(pc);CHKERRQ(ierr); 530 531 PetscFunctionReturn(0); 532 } 533 } 534 535 if (!pc_gamg->data) { 536 if (pc_gamg->orig_data) { 537 ierr = MatGetBlockSize(Pmat, &bs);CHKERRQ(ierr); 538 ierr = MatGetLocalSize(Pmat, &qq, NULL);CHKERRQ(ierr); 539 540 pc_gamg->data_sz = (qq/bs)*pc_gamg->orig_data_cell_rows*pc_gamg->orig_data_cell_cols; 541 pc_gamg->data_cell_rows = pc_gamg->orig_data_cell_rows; 542 pc_gamg->data_cell_cols = pc_gamg->orig_data_cell_cols; 543 544 ierr = PetscMalloc1(pc_gamg->data_sz, &pc_gamg->data);CHKERRQ(ierr); 545 for (qq=0; qq<pc_gamg->data_sz; qq++) pc_gamg->data[qq] = pc_gamg->orig_data[qq]; 546 } else { 547 if (!pc_gamg->ops->createdefaultdata) SETERRQ(comm,PETSC_ERR_PLIB,"'createdefaultdata' not set(?) 
need to support NULL data"); 548 ierr = pc_gamg->ops->createdefaultdata(pc,Pmat);CHKERRQ(ierr); 549 } 550 } 551 552 /* cache original data for reuse */ 553 if (!pc_gamg->orig_data && redo_mesh_setup) { 554 ierr = PetscMalloc1(pc_gamg->data_sz, &pc_gamg->orig_data);CHKERRQ(ierr); 555 for (qq=0; qq<pc_gamg->data_sz; qq++) pc_gamg->orig_data[qq] = pc_gamg->data[qq]; 556 pc_gamg->orig_data_cell_rows = pc_gamg->data_cell_rows; 557 pc_gamg->orig_data_cell_cols = pc_gamg->data_cell_cols; 558 } 559 560 /* get basic dims */ 561 ierr = MatGetBlockSize(Pmat, &bs);CHKERRQ(ierr); 562 563 ierr = MatGetSize(Pmat, &M, &qq);CHKERRQ(ierr); 564 if (pc_gamg->verbose) { 565 PetscInt NN = M; 566 if (pc_gamg->verbose==1) { 567 ierr = MatGetInfo(Pmat,MAT_LOCAL,&info);CHKERRQ(ierr); 568 ierr = MatGetLocalSize(Pmat, &NN, &qq);CHKERRQ(ierr); 569 if (!NN) NN=1; 570 } else { 571 ierr = MatGetInfo(Pmat,MAT_GLOBAL_SUM,&info);CHKERRQ(ierr); 572 } 573 nnz0 = info.nz_used; 574 nnztot = info.nz_used; 575 ierr = PetscPrintf(comm,"\t[%d]%s level %d N=%d, n data rows=%d, n data cols=%d, nnz/row (ave)=%d, np=%d\n", 576 rank,__FUNCT__,0,M,pc_gamg->data_cell_rows,pc_gamg->data_cell_cols, 577 (int)(nnz0/(PetscReal)NN),size);CHKERRQ(ierr); 578 } 579 580 /* Get A_i and R_i */ 581 for (level=0, Aarr[0]=Pmat, nactivepe = size; /* hard wired stopping logic */ 582 level < (pc_gamg->Nlevels-1) && (level==0 || M>pc_gamg->coarse_eq_limit); 583 level++) { 584 pc_gamg->firstCoarsen = (level ? 
PETSC_FALSE : PETSC_TRUE); 585 level1 = level + 1; 586 #if defined PETSC_GAMG_USE_LOG 587 ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET1],0,0,0,0);CHKERRQ(ierr); 588 #if (defined GAMG_STAGES) 589 ierr = PetscLogStagePush(gamg_stages[level]);CHKERRQ(ierr); 590 #endif 591 #endif 592 { /* construct prolongator */ 593 Mat Gmat; 594 PetscCoarsenData *agg_lists; 595 Mat Prol11; 596 597 ierr = pc_gamg->ops->graph(pc,Aarr[level], &Gmat);CHKERRQ(ierr); 598 ierr = pc_gamg->ops->coarsen(pc, &Gmat, &agg_lists);CHKERRQ(ierr); 599 ierr = pc_gamg->ops->prolongator(pc,Aarr[level],Gmat,agg_lists,&Prol11);CHKERRQ(ierr); 600 601 /* could have failed to create new level */ 602 if (Prol11) { 603 /* get new block size of coarse matrices */ 604 ierr = MatGetBlockSizes(Prol11, NULL, &bs);CHKERRQ(ierr); 605 606 if (pc_gamg->ops->optprolongator) { 607 /* smooth */ 608 ierr = pc_gamg->ops->optprolongator(pc, Aarr[level], &Prol11);CHKERRQ(ierr); 609 } 610 611 Parr[level1] = Prol11; 612 } else Parr[level1] = NULL; 613 614 if (pc_gamg->use_aggs_in_gasm) { 615 PetscInt bs; 616 ierr = MatGetBlockSizes(Prol11, &bs, NULL);CHKERRQ(ierr); 617 ierr = PetscCDGetASMBlocks(agg_lists, bs, &nASMBlocksArr[level], &ASMLocalIDsArr[level]);CHKERRQ(ierr); 618 } 619 620 ierr = MatDestroy(&Gmat);CHKERRQ(ierr); 621 ierr = PetscCDDestroy(agg_lists);CHKERRQ(ierr); 622 } /* construct prolongator scope */ 623 #if defined PETSC_GAMG_USE_LOG 624 ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET1],0,0,0,0);CHKERRQ(ierr); 625 #endif 626 /* cache eigen estimate */ 627 if (pc_gamg->emax_id != -1) { 628 PetscBool flag; 629 ierr = PetscObjectComposedDataGetReal((PetscObject)Aarr[level], pc_gamg->emax_id, emaxs[level], flag);CHKERRQ(ierr); 630 if (!flag) emaxs[level] = -1.; 631 } else emaxs[level] = -1.; 632 if (level==0) Aarr[0] = Pmat; /* use Pmat for finest level setup */ 633 if (!Parr[level1]) { 634 if (pc_gamg->verbose) { 635 ierr = PetscPrintf(comm,"\t[%d]%s stop gridding, level 
%d\n",rank,__FUNCT__,level);CHKERRQ(ierr); 636 } 637 #if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES) 638 ierr = PetscLogStagePop();CHKERRQ(ierr); 639 #endif 640 break; 641 } 642 #if defined PETSC_GAMG_USE_LOG 643 ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET2],0,0,0,0);CHKERRQ(ierr); 644 #endif 645 646 ierr = pc_gamg->ops->createlevel(pc, Aarr[level], bs, 647 &Parr[level1], &Aarr[level1], &nactivepe, NULL);CHKERRQ(ierr); 648 649 #if defined PETSC_GAMG_USE_LOG 650 ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET2],0,0,0,0);CHKERRQ(ierr); 651 #endif 652 ierr = MatGetSize(Aarr[level1], &M, &qq);CHKERRQ(ierr); 653 654 if (pc_gamg->verbose > 0) { 655 PetscInt NN = M; 656 if (pc_gamg->verbose==1) { 657 ierr = MatGetInfo(Aarr[level1],MAT_LOCAL,&info);CHKERRQ(ierr); 658 ierr = MatGetLocalSize(Aarr[level1], &NN, &qq);CHKERRQ(ierr); 659 if (!NN) NN=1; 660 } else { 661 ierr = MatGetInfo(Aarr[level1], MAT_GLOBAL_SUM, &info);CHKERRQ(ierr); 662 } 663 664 nnztot += info.nz_used; 665 ierr = PetscPrintf(comm,"\t\t[%d]%s %d) N=%d, n data cols=%d, nnz/row (ave)=%d, %d active pes\n", 666 rank,__FUNCT__,(int)level1,M,pc_gamg->data_cell_cols, 667 (int)(info.nz_used/(PetscReal)NN), nactivepe);CHKERRQ(ierr); 668 } 669 #if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES) 670 ierr = PetscLogStagePop();CHKERRQ(ierr); 671 #endif 672 /* stop if one node or one proc -- could pull back for singular problems */ 673 if ( (pc_gamg->data_cell_cols && M/pc_gamg->data_cell_cols < 2) || (!pc_gamg->data_cell_cols && M < 2) ) { 674 if (pc_gamg->verbose) { 675 ierr = PetscPrintf(comm,"\t[%d]%s HARD stop of coarsening ?????????, level %d\n",rank,__FUNCT__,level);CHKERRQ(ierr); 676 } 677 level++; 678 break; 679 } 680 } /* levels */ 681 pc_gamg->firstCoarsen = PETSC_FALSE; 682 683 if (pc_gamg->data) { 684 ierr = PetscFree(pc_gamg->data);CHKERRQ(ierr); 685 pc_gamg->data = NULL; 686 } 687 688 if (pc_gamg->verbose) PetscPrintf(comm,"\t[%d]%s %d levels, grid complexity = 
%g\n",0,__FUNCT__,level+1,nnztot/nnz0); 689 pc_gamg->Nlevels = level + 1; 690 fine_level = level; 691 ierr = PCMGSetLevels(pc,pc_gamg->Nlevels,NULL);CHKERRQ(ierr); 692 693 /* simple setup */ 694 if (!PETSC_TRUE) { 695 PC_MG_Levels **mglevels = mg->levels; 696 for (lidx=0,level=pc_gamg->Nlevels-1; 697 lidx<fine_level; 698 lidx++, level--) { 699 ierr = PCMGSetInterpolation(pc, lidx+1, Parr[level]);CHKERRQ(ierr); 700 ierr = KSPSetOperators(mglevels[lidx]->smoothd, Aarr[level], Aarr[level]);CHKERRQ(ierr); 701 ierr = MatDestroy(&Parr[level]);CHKERRQ(ierr); 702 ierr = MatDestroy(&Aarr[level]);CHKERRQ(ierr); 703 } 704 ierr = KSPSetOperators(mglevels[fine_level]->smoothd, Aarr[0], Aarr[0]);CHKERRQ(ierr); 705 706 ierr = PCSetUp_MG(pc);CHKERRQ(ierr); 707 } else if (pc_gamg->Nlevels > 1) { /* don't setup MG if one level */ 708 /* set default smoothers & set operators */ 709 for (lidx = 1, level = pc_gamg->Nlevels-2; 710 lidx <= fine_level; 711 lidx++, level--) { 712 KSP smoother; 713 PC subpc; 714 715 ierr = PCMGGetSmoother(pc, lidx, &smoother);CHKERRQ(ierr); 716 ierr = KSPGetPC(smoother, &subpc);CHKERRQ(ierr); 717 718 ierr = KSPSetNormType(smoother, KSP_NORM_NONE);CHKERRQ(ierr); 719 /* set ops */ 720 ierr = KSPSetOperators(smoother, Aarr[level], Aarr[level]);CHKERRQ(ierr); 721 ierr = PCMGSetInterpolation(pc, lidx, Parr[level+1]);CHKERRQ(ierr); 722 723 /* set defaults */ 724 ierr = KSPSetType(smoother, KSPCHEBYSHEV);CHKERRQ(ierr); 725 726 /* set blocks for GASM smoother that uses the 'aggregates' */ 727 if (pc_gamg->use_aggs_in_gasm) { 728 PetscInt sz; 729 IS *is; 730 731 sz = nASMBlocksArr[level]; 732 is = ASMLocalIDsArr[level]; 733 ierr = PCSetType(subpc, PCGASM);CHKERRQ(ierr); 734 ierr = PCGASMSetOverlap(subpc, 0);CHKERRQ(ierr); 735 if (sz==0) { 736 IS is; 737 PetscInt my0,kk; 738 ierr = MatGetOwnershipRange(Aarr[level], &my0, &kk);CHKERRQ(ierr); 739 ierr = ISCreateGeneral(PETSC_COMM_SELF, 1, &my0, PETSC_COPY_VALUES, &is);CHKERRQ(ierr); 740 ierr = 
PCGASMSetSubdomains(subpc, 1, &is, NULL);CHKERRQ(ierr); 741 ierr = ISDestroy(&is);CHKERRQ(ierr); 742 } else { 743 PetscInt kk; 744 ierr = PCGASMSetSubdomains(subpc, sz, is, NULL);CHKERRQ(ierr); 745 for (kk=0; kk<sz; kk++) { 746 ierr = ISDestroy(&is[kk]);CHKERRQ(ierr); 747 } 748 ierr = PetscFree(is);CHKERRQ(ierr); 749 } 750 ASMLocalIDsArr[level] = NULL; 751 nASMBlocksArr[level] = 0; 752 ierr = PCGASMSetType(subpc, PC_GASM_BASIC);CHKERRQ(ierr); 753 } else { 754 ierr = PCSetType(subpc, PCSOR);CHKERRQ(ierr); 755 } 756 } 757 { 758 /* coarse grid */ 759 KSP smoother,*k2; PC subpc,pc2; PetscInt ii,first; 760 Mat Lmat = Aarr[(level=pc_gamg->Nlevels-1)]; lidx = 0; 761 ierr = PCMGGetSmoother(pc, lidx, &smoother);CHKERRQ(ierr); 762 ierr = KSPSetOperators(smoother, Lmat, Lmat);CHKERRQ(ierr); 763 ierr = KSPSetNormType(smoother, KSP_NORM_NONE);CHKERRQ(ierr); 764 ierr = KSPGetPC(smoother, &subpc);CHKERRQ(ierr); 765 ierr = PCSetType(subpc, PCBJACOBI);CHKERRQ(ierr); 766 ierr = PCSetUp(subpc);CHKERRQ(ierr); 767 ierr = PCBJacobiGetSubKSP(subpc,&ii,&first,&k2);CHKERRQ(ierr); 768 if (ii != 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"ii %D is not one",ii); 769 ierr = KSPGetPC(k2[0],&pc2);CHKERRQ(ierr); 770 ierr = PCSetType(pc2, PCLU);CHKERRQ(ierr); 771 ierr = PCFactorSetShiftType(pc2,MAT_SHIFT_INBLOCKS);CHKERRQ(ierr); 772 ierr = KSPSetTolerances(k2[0],PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,1);CHKERRQ(ierr); 773 /* This flag gets reset by PCBJacobiGetSubKSP(), but our BJacobi really does the same algorithm everywhere (and in 774 * fact, all but one process will have zero dofs), so we reset the flag to avoid having PCView_BJacobi attempt to 775 * view every subdomain as though they were different. 
*/ 776 ((PC_BJacobi*)subpc->data)->same_local_solves = PETSC_TRUE; 777 } 778 779 /* should be called in PCSetFromOptions_GAMG(), but cannot be called prior to PCMGSetLevels() */ 780 ierr = PetscObjectOptionsBegin((PetscObject)pc);CHKERRQ(ierr); 781 ierr = PCSetFromOptions_MG(PetscOptionsObject,pc);CHKERRQ(ierr); 782 ierr = PetscOptionsEnd();CHKERRQ(ierr); 783 if (mg->galerkin != 2) SETERRQ(comm,PETSC_ERR_USER,"GAMG does Galerkin manually so the -pc_mg_galerkin option must not be used."); 784 785 /* create cheby smoothers */ 786 for (lidx = 1, level = pc_gamg->Nlevels-2; 787 lidx <= fine_level; 788 lidx++, level--) { 789 KSP smoother; 790 PetscBool flag,flag2; 791 PC subpc; 792 793 ierr = PCMGGetSmoother(pc, lidx, &smoother);CHKERRQ(ierr); 794 ierr = KSPGetPC(smoother, &subpc);CHKERRQ(ierr); 795 796 /* do my own cheby */ 797 ierr = PetscObjectTypeCompare((PetscObject)smoother, KSPCHEBYSHEV, &flag);CHKERRQ(ierr); 798 if (flag) { 799 PetscReal emax, emin; 800 ierr = PetscObjectTypeCompare((PetscObject)subpc, PCJACOBI, &flag);CHKERRQ(ierr); 801 ierr = PetscObjectTypeCompare((PetscObject)subpc, PCSOR, &flag2);CHKERRQ(ierr); 802 if ((flag||flag2) && emaxs[level] > 0.0) emax=emaxs[level]; /* eigen estimate only for diagnal PC but lets acccept SOR because it is close and safe (always lower) */ 803 else { /* eigen estimate 'emax' -- this is done in cheby */ 804 KSP eksp; 805 Mat Lmat = Aarr[level]; 806 Vec bb, xx; 807 808 ierr = MatCreateVecs(Lmat, &bb, 0);CHKERRQ(ierr); 809 ierr = MatCreateVecs(Lmat, &xx, 0);CHKERRQ(ierr); 810 { 811 PetscRandom rctx; 812 ierr = PetscRandomCreate(comm,&rctx);CHKERRQ(ierr); 813 ierr = PetscRandomSetFromOptions(rctx);CHKERRQ(ierr); 814 ierr = VecSetRandom(bb,rctx);CHKERRQ(ierr); 815 ierr = PetscRandomDestroy(&rctx);CHKERRQ(ierr); 816 } 817 818 /* zeroing out BC rows -- needed for crazy matrices */ 819 { 820 PetscInt Istart,Iend,ncols,jj,Ii; 821 PetscScalar zero = 0.0; 822 ierr = MatGetOwnershipRange(Lmat, &Istart, &Iend);CHKERRQ(ierr); 823 
for (Ii = Istart, jj = 0; Ii < Iend; Ii++, jj++) { 824 ierr = MatGetRow(Lmat,Ii,&ncols,0,0);CHKERRQ(ierr); 825 if (ncols <= 1) { 826 ierr = VecSetValues(bb, 1, &Ii, &zero, INSERT_VALUES);CHKERRQ(ierr); 827 } 828 ierr = MatRestoreRow(Lmat,Ii,&ncols,0,0);CHKERRQ(ierr); 829 } 830 ierr = VecAssemblyBegin(bb);CHKERRQ(ierr); 831 ierr = VecAssemblyEnd(bb);CHKERRQ(ierr); 832 } 833 834 ierr = KSPCreate(comm, &eksp);CHKERRQ(ierr); 835 ierr = KSPSetTolerances(eksp, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT, 10);CHKERRQ(ierr); 836 ierr = KSPSetNormType(eksp, KSP_NORM_NONE);CHKERRQ(ierr); 837 ierr = KSPSetOptionsPrefix(eksp,((PetscObject)pc)->prefix);CHKERRQ(ierr); 838 ierr = KSPAppendOptionsPrefix(eksp, "gamg_est_");CHKERRQ(ierr); 839 ierr = KSPSetFromOptions(eksp);CHKERRQ(ierr); 840 841 ierr = KSPSetInitialGuessNonzero(eksp, PETSC_FALSE);CHKERRQ(ierr); 842 ierr = KSPSetOperators(eksp, Lmat, Lmat);CHKERRQ(ierr); 843 ierr = KSPSetComputeSingularValues(eksp,PETSC_TRUE);CHKERRQ(ierr); 844 845 /* set PC type to be same as smoother */ 846 ierr = KSPSetPC(eksp, subpc);CHKERRQ(ierr); 847 848 /* solve - keep stuff out of logging */ 849 ierr = PetscLogEventDeactivate(KSP_Solve);CHKERRQ(ierr); 850 ierr = PetscLogEventDeactivate(PC_Apply);CHKERRQ(ierr); 851 ierr = KSPSolve(eksp, bb, xx);CHKERRQ(ierr); 852 ierr = PetscLogEventActivate(KSP_Solve);CHKERRQ(ierr); 853 ierr = PetscLogEventActivate(PC_Apply);CHKERRQ(ierr); 854 855 ierr = KSPComputeExtremeSingularValues(eksp, &emax, &emin);CHKERRQ(ierr); 856 857 ierr = VecDestroy(&xx);CHKERRQ(ierr); 858 ierr = VecDestroy(&bb);CHKERRQ(ierr); 859 ierr = KSPDestroy(&eksp);CHKERRQ(ierr); 860 861 if (pc_gamg->verbose > 0) { 862 PetscInt N1, tt; 863 ierr = MatGetSize(Aarr[level], &N1, &tt);CHKERRQ(ierr); 864 PetscPrintf(comm,"\t\t\t%s PC setup max eigen=%e min=%e on level %d (N=%d)\n",__FUNCT__,emax,emin,lidx,N1); 865 } 866 } 867 { 868 PetscInt N1, N0; 869 ierr = MatGetSize(Aarr[level], &N1, NULL);CHKERRQ(ierr); 870 ierr = MatGetSize(Aarr[level+1], 
&N0, NULL);CHKERRQ(ierr); 871 /* heuristic - is this crap? */ 872 /* emin = 1.*emax/((PetscReal)N1/(PetscReal)N0); */ 873 emin = emax * pc_gamg->eigtarget[0]; 874 emax *= pc_gamg->eigtarget[1]; 875 } 876 ierr = KSPChebyshevSetEigenvalues(smoother, emax, emin);CHKERRQ(ierr); 877 } /* setup checby flag */ 878 } /* non-coarse levels */ 879 880 /* clean up */ 881 for (level=1; level<pc_gamg->Nlevels; level++) { 882 ierr = MatDestroy(&Parr[level]);CHKERRQ(ierr); 883 ierr = MatDestroy(&Aarr[level]);CHKERRQ(ierr); 884 } 885 886 ierr = PCSetUp_MG(pc);CHKERRQ(ierr); 887 } else { 888 KSP smoother; 889 if (pc_gamg->verbose) PetscPrintf(comm,"\t[%d]%s one level solver used (system is seen as DD). Using default solver.\n",rank,__FUNCT__); 890 ierr = PCMGGetSmoother(pc, 0, &smoother);CHKERRQ(ierr); 891 ierr = KSPSetOperators(smoother, Aarr[0], Aarr[0]);CHKERRQ(ierr); 892 ierr = KSPSetType(smoother, KSPPREONLY);CHKERRQ(ierr); 893 ierr = PCSetUp_MG(pc);CHKERRQ(ierr); 894 } 895 PetscFunctionReturn(0); 896 } 897 898 /* ------------------------------------------------------------------------- */ 899 /* 900 PCDestroy_GAMG - Destroys the private context for the GAMG preconditioner 901 that was created with PCCreate_GAMG(). 902 903 Input Parameter: 904 . 
   pc - the preconditioner context

   Application Interface Routine: PCDestroy()
*/
#undef __FUNCT__
#define __FUNCT__ "PCDestroy_GAMG"
PetscErrorCode PCDestroy_GAMG(PC pc)
{
  PetscErrorCode ierr;
  PC_MG          *mg      = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  /* release the coarse-grid data arrays (pc_gamg->data / orig_data) first */
  ierr = PCReset_GAMG(pc);CHKERRQ(ierr);
  /* give the subtype (AGG/GEO/Classical) a chance to destroy its own context */
  if (pc_gamg->ops->destroy) {
    ierr = (*pc_gamg->ops->destroy)(pc);CHKERRQ(ierr);
  }
  ierr = PetscFree(pc_gamg->ops);CHKERRQ(ierr);
  ierr = PetscFree(pc_gamg->gamg_type_name);CHKERRQ(ierr);
  ierr = PetscFree(pc_gamg);CHKERRQ(ierr);
  /* destroy the base-class (PCMG) data last; pc->data still points to it */
  ierr = PCDestroy_MG(pc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetProcEqLim"
/*@
   PCGAMGSetProcEqLim - Set number of equations to aim for on coarse grids via processor reduction.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - the number of equations per process to aim for (values <= 0 are ignored)

   Options Database Key:
.  -pc_gamg_process_eq_limit <limit>

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: PCGAMGSetCoarseEqLim()
@*/
PetscErrorCode PCGAMGSetProcEqLim(PC pc, PetscInt n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* dispatch to PCGAMGSetProcEqLim_GAMG() when pc is a GAMG preconditioner; no-op otherwise */
  ierr = PetscTryMethod(pc,"PCGAMGSetProcEqLim_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetProcEqLim_GAMG"
static PetscErrorCode PCGAMGSetProcEqLim_GAMG(PC pc, PetscInt n)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  if (n>0) pc_gamg->min_eq_proc = n; /* non-positive values leave the current limit unchanged */
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetCoarseEqLim"
/*@
   PCGAMGSetCoarseEqLim - Set max number of equations on coarse grids.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - maximum number of equations to aim for (values <= 0 are ignored)

   Options Database Key:
.  -pc_gamg_coarse_eq_limit <limit>

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: PCGAMGSetProcEqLim()
@*/
PetscErrorCode PCGAMGSetCoarseEqLim(PC pc, PetscInt n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* dispatch to PCGAMGSetCoarseEqLim_GAMG() when pc is a GAMG preconditioner; no-op otherwise */
  ierr = PetscTryMethod(pc,"PCGAMGSetCoarseEqLim_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetCoarseEqLim_GAMG"
static PetscErrorCode PCGAMGSetCoarseEqLim_GAMG(PC pc, PetscInt n)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  if (n>0) pc_gamg->coarse_eq_limit = n; /* non-positive values leave the current limit unchanged */
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetRepartitioning"
/*@
   PCGAMGSetRepartitioning - Repartition the coarse grids

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - PETSC_TRUE or PETSC_FALSE

   Options Database Key:
.
-pc_gamg_repartition <true,false>

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: PCGAMGSetReuseInterpolation()
@*/
PetscErrorCode PCGAMGSetRepartitioning(PC pc, PetscBool n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* dispatch to PCGAMGSetRepartitioning_GAMG() when pc is a GAMG preconditioner; no-op otherwise */
  ierr = PetscTryMethod(pc,"PCGAMGSetRepartitioning_C",(PC,PetscBool),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetRepartitioning_GAMG"
static PetscErrorCode PCGAMGSetRepartitioning_GAMG(PC pc, PetscBool n)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->repart = n;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetReuseInterpolation"
/*@
   PCGAMGSetReuseInterpolation - Reuse prolongation when rebuilding preconditioner

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - PETSC_TRUE or PETSC_FALSE

   Options Database Key:
.  -pc_gamg_reuse_interpolation <true,false>

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: PCGAMGSetRepartitioning()
@*/
PetscErrorCode PCGAMGSetReuseInterpolation(PC pc, PetscBool n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* dispatch to PCGAMGSetReuseInterpolation_GAMG() when pc is a GAMG preconditioner; no-op otherwise */
  ierr = PetscTryMethod(pc,"PCGAMGSetReuseInterpolation_C",(PC,PetscBool),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetReuseInterpolation_GAMG"
static PetscErrorCode PCGAMGSetReuseInterpolation_GAMG(PC pc, PetscBool n)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->reuse_prol = n;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetUseASMAggs"
/*@
   PCGAMGSetUseASMAggs - Use the aggregates from the coarsening as the subdomains for the GASM smoother

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - PETSC_TRUE or PETSC_FALSE

   Options Database Key:
.
-pc_gamg_use_agg_gasm

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetUseASMAggs(PC pc, PetscBool n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* dispatch to PCGAMGSetUseASMAggs_GAMG() when pc is a GAMG preconditioner; no-op otherwise */
  ierr = PetscTryMethod(pc,"PCGAMGSetUseASMAggs_C",(PC,PetscBool),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetUseASMAggs_GAMG"
static PetscErrorCode PCGAMGSetUseASMAggs_GAMG(PC pc, PetscBool n)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->use_aggs_in_gasm = n;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetNlevels"
/*@
   PCGAMGSetNlevels - Sets the maximum number of levels PCGAMG will use

   Not collective on PC

   Input Parameters:
+  pc - the preconditioner
-  n - the maximum number of levels to use

   Options Database Key:
.  -pc_mg_levels

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetNlevels(PC pc, PetscInt n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* dispatch to PCGAMGSetNlevels_GAMG() when pc is a GAMG preconditioner; no-op otherwise */
  ierr = PetscTryMethod(pc,"PCGAMGSetNlevels_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetNlevels_GAMG"
static PetscErrorCode PCGAMGSetNlevels_GAMG(PC pc, PetscInt n)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->Nlevels = n;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetThreshold"
/*@
   PCGAMGSetThreshold - Relative threshold to use for dropping edges in aggregation graph

   Not collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  threshold - the threshold value, 0.0 is the lowest possible

   Options Database Key:
.
-pc_gamg_threshold <threshold>

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetThreshold(PC pc, PetscReal n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* dispatch to PCGAMGSetThreshold_GAMG() when pc is a GAMG preconditioner; no-op otherwise */
  ierr = PetscTryMethod(pc,"PCGAMGSetThreshold_C",(PC,PetscReal),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetThreshold_GAMG"
static PetscErrorCode PCGAMGSetThreshold_GAMG(PC pc, PetscReal n)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->threshold = n;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetType"
/*@
   PCGAMGSetType - Set solution method

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  type - PCGAMGAGG, PCGAMGGEO, or PCGAMGCLASSICAL

   Options Database Key:
.  -pc_gamg_type <agg,geo,classical>

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: PCGAMGGetType(), PCGAMG
@*/
PetscErrorCode PCGAMGSetType(PC pc, PCGAMGType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* dispatch to PCGAMGSetType_GAMG() when pc is a GAMG preconditioner; no-op otherwise */
  ierr = PetscTryMethod(pc,"PCGAMGSetType_C",(PC,PCGAMGType),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGGetType"
/*@
   PCGAMGGetType - Get solution method

   Collective on PC

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.
type - the type of algorithm used 1265 1266 Level: intermediate 1267 1268 Concepts: Unstructured multrigrid preconditioner 1269 1270 .seealso: PCGAMGSetType() 1271 @*/ 1272 PetscErrorCode PCGAMGGetType(PC pc, PCGAMGType *type) 1273 { 1274 PetscErrorCode ierr; 1275 1276 PetscFunctionBegin; 1277 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1278 ierr = PetscUseMethod(pc,"PCGAMGGetType_C",(PC,PCGAMGType*),(pc,type));CHKERRQ(ierr); 1279 PetscFunctionReturn(0); 1280 } 1281 1282 #undef __FUNCT__ 1283 #define __FUNCT__ "PCGAMGGetType_GAMG" 1284 static PetscErrorCode PCGAMGGetType_GAMG(PC pc, PCGAMGType *type) 1285 { 1286 PC_MG *mg = (PC_MG*)pc->data; 1287 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 1288 1289 PetscFunctionBegin; 1290 *type = pc_gamg->type; 1291 PetscFunctionReturn(0); 1292 } 1293 1294 #undef __FUNCT__ 1295 #define __FUNCT__ "PCGAMGSetType_GAMG" 1296 static PetscErrorCode PCGAMGSetType_GAMG(PC pc, PCGAMGType type) 1297 { 1298 PetscErrorCode ierr,(*r)(PC); 1299 PC_MG *mg = (PC_MG*)pc->data; 1300 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 1301 1302 PetscFunctionBegin; 1303 pc_gamg->type = type; 1304 ierr = PetscFunctionListFind(GAMGList,type,&r);CHKERRQ(ierr); 1305 if (!r) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_UNKNOWN_TYPE,"Unknown GAMG type %s given",type); 1306 if (pc_gamg->ops->destroy) { 1307 /* there was something here - kill it */ 1308 ierr = (*pc_gamg->ops->destroy)(pc);CHKERRQ(ierr); 1309 ierr = PetscMemzero(pc_gamg->ops,sizeof(struct _PCGAMGOps));CHKERRQ(ierr); 1310 pc_gamg->ops->createlevel = PCGAMGCreateLevel_GAMG; 1311 /* cleaning up common data in pc_gamg - this should disapear someday */ 1312 pc_gamg->data_cell_cols = 0; 1313 pc_gamg->data_cell_rows = 0; 1314 pc_gamg->orig_data_cell_cols = 0; 1315 pc_gamg->orig_data_cell_rows = 0; 1316 if (pc_gamg->data_sz) { 1317 ierr = PetscFree(pc_gamg->data);CHKERRQ(ierr); 1318 pc_gamg->data_sz = 0; 1319 } else if (pc_gamg->data) { 1320 ierr = PetscFree(pc_gamg->data);CHKERRQ(ierr); /* can this happen ? 
*/ 1321 } 1322 } 1323 ierr = PetscFree(pc_gamg->gamg_type_name);CHKERRQ(ierr); 1324 ierr = PetscStrallocpy(type,&pc_gamg->gamg_type_name);CHKERRQ(ierr); 1325 ierr = (*r)(pc);CHKERRQ(ierr); 1326 PetscFunctionReturn(0); 1327 } 1328 1329 #undef __FUNCT__ 1330 #define __FUNCT__ "PCView_GAMG" 1331 static PetscErrorCode PCView_GAMG(PC pc,PetscViewer viewer) 1332 { 1333 PetscErrorCode ierr; 1334 PC_MG *mg = (PC_MG*)pc->data; 1335 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 1336 1337 PetscFunctionBegin; 1338 ierr = PetscViewerASCIIPrintf(viewer," GAMG specific options\n");CHKERRQ(ierr); 1339 if (pc_gamg->ops->view) { 1340 ierr = (*pc_gamg->ops->view)(pc,viewer);CHKERRQ(ierr); 1341 } 1342 PetscFunctionReturn(0); 1343 } 1344 1345 #undef __FUNCT__ 1346 #define __FUNCT__ "PCSetFromOptions_GAMG" 1347 PetscErrorCode PCSetFromOptions_GAMG(PetscOptions *PetscOptionsObject,PC pc) 1348 { 1349 PetscErrorCode ierr; 1350 PC_MG *mg = (PC_MG*)pc->data; 1351 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 1352 PetscBool flag; 1353 PetscInt two = 2; 1354 MPI_Comm comm; 1355 1356 PetscFunctionBegin; 1357 ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr); 1358 ierr = PetscOptionsHead(PetscOptionsObject,"GAMG options");CHKERRQ(ierr); 1359 { 1360 /* -pc_gamg_type */ 1361 { 1362 char tname[256]; 1363 ierr = PetscOptionsFList("-pc_gamg_type","Type of AMG method","PCGAMGSetType",GAMGList, pc_gamg->gamg_type_name, tname, sizeof(tname), &flag);CHKERRQ(ierr); 1364 if (flag) { 1365 ierr = PCGAMGSetType(pc,tname);CHKERRQ(ierr); 1366 } 1367 } 1368 /* -pc_gamg_verbose */ 1369 ierr = PetscOptionsInt("-pc_gamg_verbose","Verbose (debugging) output for PCGAMG","none", pc_gamg->verbose,&pc_gamg->verbose, NULL);CHKERRQ(ierr); 1370 /* -pc_gamg_repartition */ 1371 ierr = PetscOptionsBool("-pc_gamg_repartition","Repartion coarse grids","PCGAMGRepartitioning",pc_gamg->repart,&pc_gamg->repart,NULL);CHKERRQ(ierr); 1372 /* -pc_gamg_reuse_interpolation */ 1373 ierr = 
PetscOptionsBool("-pc_gamg_reuse_interpolation","Reuse prolongation operator","PCGAMGReuseInterpolation",pc_gamg->reuse_prol,&pc_gamg->reuse_prol,NULL);CHKERRQ(ierr); 1374 /* -pc_gamg_use_agg_gasm */ 1375 ierr = PetscOptionsBool("-pc_gamg_use_agg_gasm","Use aggregation agragates for GASM smoother","PCGAMGUseASMAggs",pc_gamg->use_aggs_in_gasm,&pc_gamg->use_aggs_in_gasm,NULL);CHKERRQ(ierr); 1376 /* -pc_gamg_process_eq_limit */ 1377 ierr = PetscOptionsInt("-pc_gamg_process_eq_limit","Limit (goal) on number of equations per process on coarse grids","PCGAMGSetProcEqLim",pc_gamg->min_eq_proc,&pc_gamg->min_eq_proc,NULL);CHKERRQ(ierr); 1378 /* -pc_gamg_coarse_eq_limit */ 1379 ierr = PetscOptionsInt("-pc_gamg_coarse_eq_limit","Limit on number of equations for the coarse grid","PCGAMGSetCoarseEqLim",pc_gamg->coarse_eq_limit,&pc_gamg->coarse_eq_limit,NULL);CHKERRQ(ierr); 1380 /* -pc_gamg_threshold */ 1381 ierr = PetscOptionsReal("-pc_gamg_threshold","Relative threshold to use for dropping edges in aggregation graph","PCGAMGSetThreshold",pc_gamg->threshold,&pc_gamg->threshold,&flag);CHKERRQ(ierr); 1382 if (flag && pc_gamg->verbose) { 1383 ierr = PetscPrintf(comm,"\t[%d]%s threshold set %e\n",0,__FUNCT__,pc_gamg->threshold);CHKERRQ(ierr); 1384 } 1385 /* -pc_gamg_eigtarget */ 1386 ierr = PetscOptionsRealArray("-pc_gamg_eigtarget","Target eigenvalue range as fraction of estimated maximum eigenvalue","PCGAMGSetEigTarget",pc_gamg->eigtarget,&two,NULL);CHKERRQ(ierr); 1387 ierr = PetscOptionsInt("-pc_mg_levels","Set number of MG levels","PCGAMGSetNlevels",pc_gamg->Nlevels,&pc_gamg->Nlevels,NULL);CHKERRQ(ierr); 1388 1389 /* set options for subtype */ 1390 if (pc_gamg->ops->setfromoptions) {ierr = (*pc_gamg->ops->setfromoptions)(PetscOptionsObject,pc);CHKERRQ(ierr);} 1391 } 1392 ierr = PetscOptionsTail();CHKERRQ(ierr); 1393 PetscFunctionReturn(0); 1394 } 1395 1396 /* -------------------------------------------------------------------------- */ 1397 /*MC 1398 PCGAMG - Geometric 
algebraic multigrid (AMG) preconditioner

   Options Database Keys:
   Multigrid options(inherited)
+  -pc_mg_cycles <v>: v or w (PCMGSetCycleType())
.  -pc_mg_smoothup <1>: Number of post-smoothing steps (PCMGSetNumberSmoothUp)
.  -pc_mg_smoothdown <1>: Number of pre-smoothing steps (PCMGSetNumberSmoothDown)
-  -pc_mg_type <multiplicative>: (one of) additive multiplicative full kascade


  Notes: In order to obtain good performance for PCGAMG for vector valued problems you must
$       Call MatSetBlockSize() to indicate the number of degrees of freedom per grid point
$       Call MatSetNearNullSpace() (or PCSetCoordinates() if solving the equations of elasticity) to indicate the near null space of the operator
$       See the Users Manual Chapter 4 for more details

  Level: intermediate

  Concepts: algebraic multigrid

.seealso:  PCCreate(), PCSetType(), MatSetBlockSize(), PCMGType, PCSetCoordinates(), MatSetNearNullSpace(), PCGAMGSetType(), PCGAMGAGG, PCGAMGGEO, PCGAMGCLASSICAL, PCGAMGSetProcEqLim(),
           PCGAMGSetCoarseEqLim(), PCGAMGSetRepartitioning(), PCGAMGRegister(), PCGAMGSetReuseInterpolation(), PCGAMGSetUseASMAggs(), PCGAMGSetNlevels(), PCGAMGSetThreshold(), PCGAMGGetType()
M*/

#undef __FUNCT__
#define __FUNCT__ "PCCreate_GAMG"
/* Constructor for the GAMG preconditioner: builds on PCMG, attaches the
   PC_GAMG context, installs GAMG overrides of the PCMG function table,
   sets option defaults, registers log events, and selects the default
   subtype (AGG). */
PETSC_EXTERN PetscErrorCode PCCreate_GAMG(PC pc)
{
  PetscErrorCode ierr;
  PC_GAMG        *pc_gamg;
  PC_MG          *mg;
#if defined PETSC_GAMG_USE_LOG
  static long count = 0;  /* register the private setup events only once per process */
#endif

  PetscFunctionBegin;
  /* PCGAMG is an inherited class of PCMG. Initialize pc as PCMG */
  ierr = PCSetType(pc, PCMG);CHKERRQ(ierr); /* calls PCCreate_MG() and MGCreate_Private() */
  ierr = PetscObjectChangeTypeName((PetscObject)pc, PCGAMG);CHKERRQ(ierr);

  /* create a supporting struct and attach it to pc */
  ierr = PetscNewLog(pc,&pc_gamg);CHKERRQ(ierr);
  mg           = (PC_MG*)pc->data;
  mg->galerkin = 2;   /* Use Galerkin, but it is computed externally (see PCSetFromOptions_GAMG check) */
  mg->innerctx = pc_gamg;

  ierr = PetscNewLog(pc,&pc_gamg->ops);CHKERRQ(ierr);

  pc_gamg->setup_count = 0;
  /* these should be in subctx but repartitioning needs simple arrays */
  pc_gamg->data_sz = 0;
  pc_gamg->data    = 0;

  /* register AMG type */
  ierr = PCGAMGInitializePackage();CHKERRQ(ierr);

  /* overwrite the pointers of PCMG by the functions of base class PCGAMG */
  pc->ops->setfromoptions = PCSetFromOptions_GAMG;
  pc->ops->setup          = PCSetUp_GAMG;
  pc->ops->reset          = PCReset_GAMG;
  pc->ops->destroy        = PCDestroy_GAMG;
  mg->view                = PCView_GAMG;

  /* expose the PCGAMGSetXXX()/GetType() API via composed functions */
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetProcEqLim_C",PCGAMGSetProcEqLim_GAMG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetCoarseEqLim_C",PCGAMGSetCoarseEqLim_GAMG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetRepartitioning_C",PCGAMGSetRepartitioning_GAMG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetReuseInterpolation_C",PCGAMGSetReuseInterpolation_GAMG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetUseASMAggs_C",PCGAMGSetUseASMAggs_GAMG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetThreshold_C",PCGAMGSetThreshold_GAMG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetType_C",PCGAMGSetType_GAMG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGGetType_C",PCGAMGGetType_GAMG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetNlevels_C",PCGAMGSetNlevels_GAMG);CHKERRQ(ierr);

  /* option defaults */
  pc_gamg->repart           = PETSC_FALSE;
  pc_gamg->reuse_prol       = PETSC_FALSE;
  pc_gamg->use_aggs_in_gasm = PETSC_FALSE;
  pc_gamg->min_eq_proc      = 50;
  pc_gamg->coarse_eq_limit  = 50;
  pc_gamg->threshold        = 0.;
  pc_gamg->Nlevels          = GAMG_MAXLEVELS;
  pc_gamg->verbose          = 0;
  pc_gamg->emax_id          = -1;
  pc_gamg->firstCoarsen     = PETSC_FALSE;
  pc_gamg->eigtarget[0]     = 0.05;
  pc_gamg->eigtarget[1]     = 1.05;
  pc_gamg->ops->createlevel = PCGAMGCreateLevel_GAMG;

  /* private events */
#if defined PETSC_GAMG_USE_LOG
  if (count++ == 0) {
    ierr = PetscLogEventRegister("GAMG: createProl", PC_CLASSID, &petsc_gamg_setup_events[SET1]);CHKERRQ(ierr);
    ierr = PetscLogEventRegister(" Graph", PC_CLASSID, &petsc_gamg_setup_events[GRAPH]);CHKERRQ(ierr);
    /* PetscLogEventRegister(" G.Mat", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_MAT]); */
    /* PetscLogEventRegister(" G.Filter", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_FILTER]); */
    /* PetscLogEventRegister(" G.Square", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_SQR]); */
    ierr = PetscLogEventRegister(" MIS/Agg", PC_CLASSID, &petsc_gamg_setup_events[SET4]);CHKERRQ(ierr);
    ierr = PetscLogEventRegister(" geo: growSupp", PC_CLASSID, &petsc_gamg_setup_events[SET5]);CHKERRQ(ierr);
    ierr = PetscLogEventRegister(" geo: triangle", PC_CLASSID, &petsc_gamg_setup_events[SET6]);CHKERRQ(ierr);
    ierr = PetscLogEventRegister(" search&set", PC_CLASSID, &petsc_gamg_setup_events[FIND_V]);CHKERRQ(ierr);
    ierr = PetscLogEventRegister(" SA: col data", PC_CLASSID, &petsc_gamg_setup_events[SET7]);CHKERRQ(ierr);
    ierr = PetscLogEventRegister(" SA: frmProl0", PC_CLASSID, &petsc_gamg_setup_events[SET8]);CHKERRQ(ierr);
    ierr = PetscLogEventRegister(" SA: smooth", PC_CLASSID, &petsc_gamg_setup_events[SET9]);CHKERRQ(ierr);
    ierr = PetscLogEventRegister("GAMG: partLevel", PC_CLASSID, &petsc_gamg_setup_events[SET2]);CHKERRQ(ierr);
    ierr = PetscLogEventRegister(" repartition", PC_CLASSID, &petsc_gamg_setup_events[SET12]);CHKERRQ(ierr);
    ierr = PetscLogEventRegister(" Invert-Sort", PC_CLASSID, &petsc_gamg_setup_events[SET13]);CHKERRQ(ierr);
    ierr = PetscLogEventRegister(" Move A", PC_CLASSID, &petsc_gamg_setup_events[SET14]);CHKERRQ(ierr);
    ierr = PetscLogEventRegister(" Move P", PC_CLASSID, &petsc_gamg_setup_events[SET15]);CHKERRQ(ierr);

    /* PetscLogEventRegister(" PL move data", PC_CLASSID, &petsc_gamg_setup_events[SET13]); */
    /* PetscLogEventRegister("GAMG: fix", PC_CLASSID, &petsc_gamg_setup_events[SET10]); */
    /* PetscLogEventRegister("GAMG: set levels", PC_CLASSID, &petsc_gamg_setup_events[SET11]); */
    /* create timer stages */
#if defined GAMG_STAGES
    {
      char     str[32];
      PetscInt lidx;
      sprintf(str,"MG Level %d (finest)",0);
      ierr = PetscLogStageRegister(str, &gamg_stages[0]);CHKERRQ(ierr);
      /* NOTE(review): only stages 1..8 are registered although GAMG_MAXLEVELS is 30 --
         confirm deeper hierarchies never index gamg_stages past 8 */
      for (lidx=1; lidx<9; lidx++) {
        sprintf(str,"MG Level %d",lidx);
        ierr = PetscLogStageRegister(str, &gamg_stages[lidx]);CHKERRQ(ierr);
      }
    }
#endif
  }
#endif
  /* PCSetUp_GAMG assumes that the type has been set, so set it to the default now */
  ierr = PCGAMGSetType(pc,PCGAMGAGG);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGInitializePackage"
/*@C
   PCGAMGInitializePackage - This function initializes everything in the PCGAMG package. It is called
   from PetscDLLibraryRegister() when using dynamic libraries, and on the first call to PCCreate_GAMG()
   when using static libraries.
1533 1534 Level: developer 1535 1536 .keywords: PC, PCGAMG, initialize, package 1537 .seealso: PetscInitialize() 1538 @*/ 1539 PetscErrorCode PCGAMGInitializePackage(void) 1540 { 1541 PetscErrorCode ierr; 1542 1543 PetscFunctionBegin; 1544 if (PCGAMGPackageInitialized) PetscFunctionReturn(0); 1545 PCGAMGPackageInitialized = PETSC_TRUE; 1546 ierr = PetscFunctionListAdd(&GAMGList,PCGAMGGEO,PCCreateGAMG_GEO);CHKERRQ(ierr); 1547 ierr = PetscFunctionListAdd(&GAMGList,PCGAMGAGG,PCCreateGAMG_AGG);CHKERRQ(ierr); 1548 ierr = PetscFunctionListAdd(&GAMGList,PCGAMGCLASSICAL,PCCreateGAMG_Classical);CHKERRQ(ierr); 1549 ierr = PetscRegisterFinalize(PCGAMGFinalizePackage);CHKERRQ(ierr); 1550 1551 /* general events */ 1552 ierr = PetscLogEventRegister("PCGAMGGraph_AGG", 0, &PC_GAMGGraph_AGG);CHKERRQ(ierr); 1553 ierr = PetscLogEventRegister("PCGAMGGraph_GEO", PC_CLASSID, &PC_GAMGGraph_GEO);CHKERRQ(ierr); 1554 ierr = PetscLogEventRegister("PCGAMGCoarse_AGG", PC_CLASSID, &PC_GAMGCoarsen_AGG);CHKERRQ(ierr); 1555 ierr = PetscLogEventRegister("PCGAMGCoarse_GEO", PC_CLASSID, &PC_GAMGCoarsen_GEO);CHKERRQ(ierr); 1556 ierr = PetscLogEventRegister("PCGAMGProl_AGG", PC_CLASSID, &PC_GAMGProlongator_AGG);CHKERRQ(ierr); 1557 ierr = PetscLogEventRegister("PCGAMGProl_GEO", PC_CLASSID, &PC_GAMGProlongator_GEO);CHKERRQ(ierr); 1558 ierr = PetscLogEventRegister("PCGAMGPOpt_AGG", PC_CLASSID, &PC_GAMGOptProlongator_AGG);CHKERRQ(ierr); 1559 1560 PetscFunctionReturn(0); 1561 } 1562 1563 #undef __FUNCT__ 1564 #define __FUNCT__ "PCGAMGFinalizePackage" 1565 /*@C 1566 PCGAMGFinalizePackage - This function destroys everything in the PCGAMG package. It is 1567 called from PetscFinalize(). 
1568 1569 Level: developer 1570 1571 .keywords: Petsc, destroy, package 1572 .seealso: PetscFinalize() 1573 @*/ 1574 PetscErrorCode PCGAMGFinalizePackage(void) 1575 { 1576 PetscErrorCode ierr; 1577 1578 PetscFunctionBegin; 1579 PCGAMGPackageInitialized = PETSC_FALSE; 1580 ierr = PetscFunctionListDestroy(&GAMGList);CHKERRQ(ierr); 1581 PetscFunctionReturn(0); 1582 } 1583 1584 #undef __FUNCT__ 1585 #define __FUNCT__ "PCGAMGRegister" 1586 /*@C 1587 PCGAMGRegister - Register a PCGAMG implementation. 1588 1589 Input Parameters: 1590 + type - string that will be used as the name of the GAMG type. 1591 - create - function for creating the gamg context. 1592 1593 Level: advanced 1594 1595 .seealso: PCGAMGGetContext(), PCGAMGSetContext() 1596 @*/ 1597 PetscErrorCode PCGAMGRegister(PCGAMGType type, PetscErrorCode (*create)(PC)) 1598 { 1599 PetscErrorCode ierr; 1600 1601 PetscFunctionBegin; 1602 ierr = PCGAMGInitializePackage();CHKERRQ(ierr); 1603 ierr = PetscFunctionListAdd(&GAMGList,type,create);CHKERRQ(ierr); 1604 PetscFunctionReturn(0); 1605 } 1606 1607