/*
 GAMG geometric-algebraic multigrid PC - Mark Adams 2011
 */
#include "petsc-private/matimpl.h"
#include <../src/ksp/pc/impls/gamg/gamg.h>           /*I "petscpc.h" I*/
#include <petsc-private/kspimpl.h>

#if defined PETSC_GAMG_USE_LOG
PetscLogEvent petsc_gamg_setup_events[NUM_SET];
#endif

#if defined PETSC_USE_LOG
PetscLogEvent PC_GAMGGgraph_AGG;
PetscLogEvent PC_GAMGGgraph_GEO;
PetscLogEvent PC_GAMGCoarsen_AGG;
PetscLogEvent PC_GAMGCoarsen_GEO;
PetscLogEvent PC_GAMGProlongator_AGG;
PetscLogEvent PC_GAMGProlongator_GEO;
PetscLogEvent PC_GAMGOptprol_AGG;
PetscLogEvent PC_GAMGKKTProl_AGG;
#endif

#define GAMG_MAXLEVELS 30

/* #define GAMG_STAGES */
#if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES)
static PetscLogStage gamg_stages[GAMG_MAXLEVELS];
#endif

static PetscFunctionList GAMGList = 0;

/* ----------------------------------------------------------------------------- */
#undef __FUNCT__
#define __FUNCT__ "PCReset_GAMG"
PetscErrorCode PCReset_GAMG(PC pc)
{
  PetscErrorCode ierr;
  PC_MG          *mg      = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  if (pc_gamg->data) { /* this should not happen, cleaned up in SetUp */
    PetscPrintf(PetscObjectComm((PetscObject)pc),"***[%d]%s this should not happen, cleaned up in SetUp\n",0,__FUNCT__);
    ierr = PetscFree(pc_gamg->data);CHKERRQ(ierr);
  }
  pc_gamg->data = NULL; pc_gamg->data_sz = 0;

  if (pc_gamg->orig_data) {
    ierr = PetscFree(pc_gamg->orig_data);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/* private 2x2 Mat Nest for Stokes */
typedef struct {
  Mat A11,A21,A12,Amat;
  IS  prim_is,constr_is;
} GAMGKKTMat;

#undef __FUNCT__
#define __FUNCT__ "GAMGKKTMatCreate"
static PetscErrorCode GAMGKKTMatCreate(Mat A, PetscBool iskkt, GAMGKKTMat *out)
{
  PetscFunctionBegin;
  out->Amat = A;
  if (iskkt) {
    PetscErrorCode ierr;
    IS             is_constraint, is_prime;
    PetscInt       nmin,nmax;

    ierr = MatGetOwnershipRange(A, &nmin, &nmax);CHKERRQ(ierr);
    ierr = MatFindZeroDiagonals(A, &is_constraint);CHKERRQ(ierr);
    ierr = ISComplement(is_constraint, nmin, nmax, &is_prime);CHKERRQ(ierr);

    out->prim_is   = is_prime;
    out->constr_is = is_constraint;

    ierr = MatGetSubMatrix(A, is_prime, is_prime, MAT_INITIAL_MATRIX, &out->A11);CHKERRQ(ierr);
    ierr = MatGetSubMatrix(A, is_prime, is_constraint, MAT_INITIAL_MATRIX, &out->A12);CHKERRQ(ierr);
    ierr = MatGetSubMatrix(A, is_constraint, is_prime, MAT_INITIAL_MATRIX, &out->A21);CHKERRQ(ierr);
  } else {
    out->A11       = A;
    out->A21       = NULL;
    out->A12       = NULL;
    out->prim_is   = NULL;
    out->constr_is = NULL;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "GAMGKKTMatDestroy"
static PetscErrorCode GAMGKKTMatDestroy(GAMGKKTMat *mat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (mat->A11 && mat->A11 != mat->Amat) {
    ierr = MatDestroy(&mat->A11);CHKERRQ(ierr);
  }
  ierr = MatDestroy(&mat->A21);CHKERRQ(ierr);
  ierr = MatDestroy(&mat->A12);CHKERRQ(ierr);

  ierr = ISDestroy(&mat->prim_is);CHKERRQ(ierr);
  ierr = ISDestroy(&mat->constr_is);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
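
/*
   Note on GAMGKKTMatCreate() above: for the Stokes/KKT path the constraint
   (e.g. pressure) equations are identified purely algebraically as the rows of A
   that have a zero diagonal (MatFindZeroDiagonals), and the primal set is their
   complement.  A minimal user-level sketch of the same splitting; 'A' here is a
   hypothetical assembled saddle-point matrix, not something defined in this file:

       IS       is_constr, is_prim;
       Mat      A11;
       PetscInt rstart, rend;

       ierr = MatGetOwnershipRange(A, &rstart, &rend);CHKERRQ(ierr);
       ierr = MatFindZeroDiagonals(A, &is_constr);CHKERRQ(ierr);
       ierr = ISComplement(is_constr, rstart, rend, &is_prim);CHKERRQ(ierr);
       ierr = MatGetSubMatrix(A, is_prim, is_prim, MAT_INITIAL_MATRIX, &A11);CHKERRQ(ierr);
*/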
/* -------------------------------------------------------------------------- */
/*
   createLevel: create the coarse operator with RAP (Galerkin product), then
     repartition and/or reduce the number of active processors.

   Input Parameters:
   . pc - the preconditioner context; side effect: the coarse data in 'pc_gamg->data' and
     'pc_gamg->data_sz' are changed by the repartitioning/reduction
   . Amat_fine - matrix on this fine (k) level
   . cr_bs - coarse block size
   . isLast - this is the coarsest level, so reduce to a single active process
   . stokes - the system is a saddle-point (KKT) system and is handled as a 2x2 split
   In/Output Parameters:
   . a_P_inout - prolongation operator to the next level (k-->k-1)
   . a_nactive_proc - number of active procs
   Output Parameter:
   . a_Amat_crs - coarse matrix that is created (k-1)
*/

#undef __FUNCT__
#define __FUNCT__ "createLevel"
static PetscErrorCode createLevel(const PC pc,const Mat Amat_fine,const PetscInt cr_bs,const PetscBool isLast,const PetscBool stokes,Mat *a_P_inout,Mat *a_Amat_crs,PetscMPIInt *a_nactive_proc)
{
  PetscErrorCode  ierr;
  PC_MG           *mg         = (PC_MG*)pc->data;
  PC_GAMG         *pc_gamg    = (PC_GAMG*)mg->innerctx;
  const PetscBool repart      = pc_gamg->repart;
  const PetscInt  min_eq_proc = pc_gamg->min_eq_proc, coarse_max = pc_gamg->coarse_eq_limit;
  Mat             Cmat,Pold=*a_P_inout;
  MPI_Comm        wcomm = ((PetscObject)Amat_fine)->comm;
  PetscMPIInt     rank,size,new_size,nactive=*a_nactive_proc;
  PetscInt        ncrs_eq,ncrs_prim,f_bs;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(wcomm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(wcomm, &size);CHKERRQ(ierr);
  ierr = MatGetBlockSize(Amat_fine, &f_bs);CHKERRQ(ierr);
  /* RAP */
  ierr = MatPtAP(Amat_fine, Pold, MAT_INITIAL_MATRIX, 2.0, &Cmat);CHKERRQ(ierr);

  /* set 'ncrs_prim' (nodes), 'ncrs_eq' (equations) */
  ncrs_prim = pc_gamg->data_sz/pc_gamg->data_cell_cols/pc_gamg->data_cell_rows;
  if (pc_gamg->data_sz % (pc_gamg->data_cell_cols*pc_gamg->data_cell_rows)) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"pc_gamg->data_sz %D not divisible by (pc_gamg->data_cell_cols %D *pc_gamg->data_cell_rows %D)",pc_gamg->data_sz,pc_gamg->data_cell_cols,pc_gamg->data_cell_rows);
  ierr = MatGetLocalSize(Cmat, &ncrs_eq, NULL);CHKERRQ(ierr);

  /* get number of PEs to make active 'new_size', reduce, can be any integer 1-P */
  {
    PetscInt ncrs_eq_glob;
    ierr     = MatGetSize(Cmat, &ncrs_eq_glob, NULL);CHKERRQ(ierr);
    new_size = (PetscMPIInt)((float)ncrs_eq_glob/(float)min_eq_proc + 0.5); /* hardwire min.
number of eq/proc */ 159 if (new_size == 0 || ncrs_eq_glob < coarse_max) new_size = 1; 160 else if (new_size >= nactive) new_size = nactive; /* no change, rare */ 161 if (isLast) new_size = 1; 162 } 163 164 if (!repart && new_size==nactive) *a_Amat_crs = Cmat; /* output - no repartitioning or reduction - could bail here */ 165 else { 166 const PetscInt *idx,ndata_rows=pc_gamg->data_cell_rows,ndata_cols=pc_gamg->data_cell_cols,node_data_sz=ndata_rows*ndata_cols; 167 PetscInt *counts,*newproc_idx,ii,jj,kk,strideNew,*tidx,ncrs_prim_new,ncrs_eq_new,nloc_old; 168 IS is_eq_newproc,is_eq_newproc_prim,is_eq_num,is_eq_num_prim,isscat,new_eq_indices; 169 VecScatter vecscat; 170 PetscScalar *array; 171 Vec src_crd, dest_crd; 172 173 nloc_old = ncrs_eq/cr_bs; 174 if (ncrs_eq % cr_bs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"ncrs_eq %D not divisible by cr_bs %D",ncrs_eq,cr_bs); 175 #if defined PETSC_GAMG_USE_LOG 176 ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET12],0,0,0,0);CHKERRQ(ierr); 177 #endif 178 /* make 'is_eq_newproc' */ 179 ierr = PetscMalloc(size*sizeof(PetscInt), &counts);CHKERRQ(ierr); 180 if (repart && !stokes) { 181 /* Repartition Cmat_{k} and move colums of P^{k}_{k-1} and coordinates of primal part accordingly */ 182 Mat adj; 183 184 if (pc_gamg->verbose>0) { 185 if (pc_gamg->verbose==1) PetscPrintf(wcomm,"\t[%d]%s repartition: size (active): %d --> %d, neq = %d\n",rank,__FUNCT__,*a_nactive_proc,new_size,ncrs_eq); 186 else { 187 PetscInt n; 188 ierr = MPI_Allreduce(&ncrs_eq, &n, 1, MPIU_INT, MPI_SUM, wcomm);CHKERRQ(ierr); 189 PetscPrintf(wcomm,"\t[%d]%s repartition: size (active): %d --> %d, neq = %d\n",rank,__FUNCT__,*a_nactive_proc,new_size,n); 190 } 191 } 192 193 /* get 'adj' */ 194 if (cr_bs == 1) { 195 ierr = MatConvert(Cmat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj);CHKERRQ(ierr); 196 } else { 197 /* make a scalar matrix to partition (no Stokes here) */ 198 Mat tMat; 199 PetscInt Istart_crs,Iend_crs,ncols,jj,Ii; 200 const PetscScalar *vals; 201 const PetscInt *idx; 202 PetscInt *d_nnz, *o_nnz, M, N; 203 static PetscInt llev = 0; 204 205 ierr = PetscMalloc(ncrs_prim*sizeof(PetscInt), &d_nnz);CHKERRQ(ierr); 206 ierr = PetscMalloc(ncrs_prim*sizeof(PetscInt), &o_nnz);CHKERRQ(ierr); 207 ierr = MatGetOwnershipRange(Cmat, &Istart_crs, &Iend_crs);CHKERRQ(ierr); 208 ierr = MatGetSize(Cmat, &M, &N);CHKERRQ(ierr); 209 for (Ii = Istart_crs, jj = 0; Ii < Iend_crs; Ii += cr_bs, jj++) { 210 ierr = MatGetRow(Cmat,Ii,&ncols,0,0);CHKERRQ(ierr); 211 d_nnz[jj] = ncols/cr_bs; 212 o_nnz[jj] = ncols/cr_bs; 213 ierr = MatRestoreRow(Cmat,Ii,&ncols,0,0);CHKERRQ(ierr); 214 if (d_nnz[jj] > ncrs_prim) d_nnz[jj] = ncrs_prim; 215 if (o_nnz[jj] > (M/cr_bs-ncrs_prim)) o_nnz[jj] = M/cr_bs-ncrs_prim; 216 } 217 218 ierr = MatCreate(wcomm, &tMat);CHKERRQ(ierr); 219 ierr = MatSetSizes(tMat, ncrs_prim, ncrs_prim,PETSC_DETERMINE, PETSC_DETERMINE);CHKERRQ(ierr); 220 ierr = MatSetType(tMat,MATAIJ);CHKERRQ(ierr); 221 ierr = MatSeqAIJSetPreallocation(tMat,0,d_nnz);CHKERRQ(ierr); 222 ierr = MatMPIAIJSetPreallocation(tMat,0,d_nnz,0,o_nnz);CHKERRQ(ierr); 223 ierr = PetscFree(d_nnz);CHKERRQ(ierr); 224 ierr = PetscFree(o_nnz);CHKERRQ(ierr); 225 226 for (ii = Istart_crs; ii < Iend_crs; ii++) { 227 PetscInt dest_row = ii/cr_bs; 228 ierr = MatGetRow(Cmat,ii,&ncols,&idx,&vals);CHKERRQ(ierr); 229 for (jj = 0; jj < ncols; jj++) { 230 PetscInt dest_col = idx[jj]/cr_bs; 231 PetscScalar v = 1.0; 232 ierr = MatSetValues(tMat,1,&dest_row,1,&dest_col,&v,ADD_VALUES);CHKERRQ(ierr); 233 } 234 ierr = 
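/* Each cr_bs x cr_bs block of Cmat is collapsed to a single vertex of the scalar
   partitioning matrix tMat being assembled here: with cr_bs=3, for example, dof rows
   6,7,8 all map to node row 2 (ii/cr_bs), and every dof-level nonzero contributes 1.0
   with ADD_VALUES, so tMat(i,j) simply counts the couplings between nodes i and j. */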
MatRestoreRow(Cmat,ii,&ncols,&idx,&vals);CHKERRQ(ierr); 235 } 236 ierr = MatAssemblyBegin(tMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 237 ierr = MatAssemblyEnd(tMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 238 239 if (llev++ == -1) { 240 PetscViewer viewer; char fname[32]; 241 ierr = PetscSNPrintf(fname,sizeof(fname),"part_mat_%D.mat",llev);CHKERRQ(ierr); 242 PetscViewerBinaryOpen(wcomm,fname,FILE_MODE_WRITE,&viewer); 243 ierr = MatView(tMat, viewer);CHKERRQ(ierr); 244 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr); 245 } 246 247 ierr = MatConvert(tMat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj);CHKERRQ(ierr); 248 249 ierr = MatDestroy(&tMat);CHKERRQ(ierr); 250 } /* create 'adj' */ 251 252 { /* partition: get newproc_idx */ 253 char prefix[256]; 254 const char *pcpre; 255 const PetscInt *is_idx; 256 MatPartitioning mpart; 257 IS proc_is; 258 PetscInt targetPE; 259 260 ierr = MatPartitioningCreate(wcomm, &mpart);CHKERRQ(ierr); 261 ierr = MatPartitioningSetAdjacency(mpart, adj);CHKERRQ(ierr); 262 ierr = PCGetOptionsPrefix(pc, &pcpre);CHKERRQ(ierr); 263 ierr = PetscSNPrintf(prefix,sizeof(prefix),"%spc_gamg_",pcpre ? pcpre : "");CHKERRQ(ierr); 264 ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr); 265 ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr); 266 ierr = MatPartitioningSetNParts(mpart, new_size);CHKERRQ(ierr); 267 ierr = MatPartitioningApply(mpart, &proc_is);CHKERRQ(ierr); 268 ierr = MatPartitioningDestroy(&mpart);CHKERRQ(ierr); 269 270 /* collect IS info */ 271 ierr = PetscMalloc(ncrs_eq*sizeof(PetscInt), &newproc_idx);CHKERRQ(ierr); 272 ierr = ISGetIndices(proc_is, &is_idx);CHKERRQ(ierr); 273 targetPE = 1; /* bring to "front" of machine */ 274 /*targetPE = size/new_size;*/ /* spread partitioning across machine */ 275 for (kk = jj = 0 ; kk < nloc_old ; kk++) { 276 for (ii = 0 ; ii < cr_bs ; ii++, jj++) { 277 newproc_idx[jj] = is_idx[kk] * targetPE; /* distribution */ 278 } 279 } 280 ierr = ISRestoreIndices(proc_is, &is_idx);CHKERRQ(ierr); 281 ierr = ISDestroy(&proc_is);CHKERRQ(ierr); 282 } 283 ierr = MatDestroy(&adj);CHKERRQ(ierr); 284 285 ierr = ISCreateGeneral(wcomm, ncrs_eq, newproc_idx, PETSC_COPY_VALUES, &is_eq_newproc);CHKERRQ(ierr); 286 if (newproc_idx != 0) { 287 ierr = PetscFree(newproc_idx);CHKERRQ(ierr); 288 } 289 } else { /* simple aggreagtion of parts -- 'is_eq_newproc' */ 290 291 PetscInt rfactor,targetPE; 292 /* find factor */ 293 if (new_size == 1) rfactor = size; /* easy */ 294 else { 295 PetscReal best_fact = 0.; 296 jj = -1; 297 for (kk = 1 ; kk <= size ; kk++) { 298 if (size%kk==0) { /* a candidate */ 299 PetscReal nactpe = (PetscReal)size/(PetscReal)kk, fact = nactpe/(PetscReal)new_size; 300 if (fact > 1.0) fact = 1./fact; /* keep fact < 1 */ 301 if (fact > best_fact) { 302 best_fact = fact; jj = kk; 303 } 304 } 305 } 306 if (jj != -1) rfactor = jj; 307 else rfactor = 1; /* does this happen .. 
a prime */ 308 } 309 new_size = size/rfactor; 310 311 if (new_size==nactive) { 312 *a_Amat_crs = Cmat; /* output - no repartitioning or reduction, bail out because nested here */ 313 ierr = PetscFree(counts);CHKERRQ(ierr); 314 if (pc_gamg->verbose>0) { 315 PetscPrintf(wcomm,"\t[%d]%s aggregate processors noop: new_size=%d, neq(loc)=%d\n",rank,__FUNCT__,new_size,ncrs_eq); 316 } 317 PetscFunctionReturn(0); 318 } 319 320 if (pc_gamg->verbose) PetscPrintf(wcomm,"\t[%d]%s number of equations (loc) %d with simple aggregation\n",rank,__FUNCT__,ncrs_eq); 321 targetPE = rank/rfactor; 322 ierr = ISCreateStride(wcomm, ncrs_eq, targetPE, 0, &is_eq_newproc);CHKERRQ(ierr); 323 324 if (stokes) { 325 ierr = ISCreateStride(wcomm, ncrs_prim*cr_bs, targetPE, 0, &is_eq_newproc_prim);CHKERRQ(ierr); 326 } 327 } /* end simple 'is_eq_newproc' */ 328 329 /* 330 Create an index set from the is_eq_newproc index set to indicate the mapping TO 331 */ 332 ierr = ISPartitioningToNumbering(is_eq_newproc, &is_eq_num);CHKERRQ(ierr); 333 if (stokes) { 334 ierr = ISPartitioningToNumbering(is_eq_newproc_prim, &is_eq_num_prim);CHKERRQ(ierr); 335 } else is_eq_num_prim = is_eq_num; 336 /* 337 Determine how many equations/vertices are assigned to each processor 338 */ 339 ierr = ISPartitioningCount(is_eq_newproc, size, counts);CHKERRQ(ierr); 340 ncrs_eq_new = counts[rank]; 341 ierr = ISDestroy(&is_eq_newproc);CHKERRQ(ierr); 342 if (stokes) { 343 ierr = ISPartitioningCount(is_eq_newproc_prim, size, counts);CHKERRQ(ierr); 344 ierr = ISDestroy(&is_eq_newproc_prim);CHKERRQ(ierr); 345 ncrs_prim_new = counts[rank]/cr_bs; /* nodes */ 346 } else ncrs_prim_new = ncrs_eq_new/cr_bs; /* eqs */ 347 348 ierr = PetscFree(counts);CHKERRQ(ierr); 349 #if defined PETSC_GAMG_USE_LOG 350 ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET12],0,0,0,0);CHKERRQ(ierr); 351 #endif 352 353 /* move data (for primal equations only) */ 354 /* Create a vector to contain the newly ordered element information */ 355 ierr = VecCreate(wcomm, &dest_crd);CHKERRQ(ierr); 356 ierr = VecSetSizes(dest_crd, node_data_sz*ncrs_prim_new, PETSC_DECIDE);CHKERRQ(ierr); 357 ierr = VecSetFromOptions(dest_crd);CHKERRQ(ierr); /* this is needed! */ 358 /* 359 There are 'ndata_rows*ndata_cols' data items per node, (one can think of the vectors of having 360 a block size of ...). Note, ISs are expanded into equation space by 'cr_bs'. 
361 */ 362 ierr = PetscMalloc((ncrs_prim*node_data_sz)*sizeof(PetscInt), &tidx);CHKERRQ(ierr); 363 ierr = ISGetIndices(is_eq_num_prim, &idx);CHKERRQ(ierr); 364 for (ii=0,jj=0; ii<ncrs_prim; ii++) { 365 PetscInt id = idx[ii*cr_bs]/cr_bs; /* get node back */ 366 for (kk=0; kk<node_data_sz; kk++, jj++) tidx[jj] = id*node_data_sz + kk; 367 } 368 ierr = ISRestoreIndices(is_eq_num_prim, &idx);CHKERRQ(ierr); 369 ierr = ISCreateGeneral(wcomm, node_data_sz*ncrs_prim, tidx, PETSC_COPY_VALUES, &isscat);CHKERRQ(ierr); 370 ierr = PetscFree(tidx);CHKERRQ(ierr); 371 /* 372 Create a vector to contain the original vertex information for each element 373 */ 374 ierr = VecCreateSeq(PETSC_COMM_SELF, node_data_sz*ncrs_prim, &src_crd);CHKERRQ(ierr); 375 for (jj=0; jj<ndata_cols; jj++) { 376 const PetscInt stride0=ncrs_prim*pc_gamg->data_cell_rows; 377 for (ii=0; ii<ncrs_prim; ii++) { 378 for (kk=0; kk<ndata_rows; kk++) { 379 PetscInt ix = ii*ndata_rows + kk + jj*stride0, jx = ii*node_data_sz + kk*ndata_cols + jj; 380 PetscScalar tt = (PetscScalar)pc_gamg->data[ix]; 381 ierr = VecSetValues(src_crd, 1, &jx, &tt, INSERT_VALUES);CHKERRQ(ierr); 382 } 383 } 384 } 385 ierr = VecAssemblyBegin(src_crd);CHKERRQ(ierr); 386 ierr = VecAssemblyEnd(src_crd);CHKERRQ(ierr); 387 /* 388 Scatter the element vertex information (still in the original vertex ordering) 389 to the correct processor 390 */ 391 ierr = VecScatterCreate(src_crd, NULL, dest_crd, isscat, &vecscat);CHKERRQ(ierr); 392 ierr = ISDestroy(&isscat);CHKERRQ(ierr); 393 ierr = VecScatterBegin(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 394 ierr = VecScatterEnd(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 395 ierr = VecScatterDestroy(&vecscat);CHKERRQ(ierr); 396 ierr = VecDestroy(&src_crd);CHKERRQ(ierr); 397 /* 398 Put the element vertex data into a new allocation of the gdata->ele 399 */ 400 ierr = PetscFree(pc_gamg->data);CHKERRQ(ierr); 401 ierr = PetscMalloc(node_data_sz*ncrs_prim_new*sizeof(PetscReal), &pc_gamg->data);CHKERRQ(ierr); 402 403 pc_gamg->data_sz = node_data_sz*ncrs_prim_new; 404 strideNew = ncrs_prim_new*ndata_rows; 405 406 ierr = VecGetArray(dest_crd, &array);CHKERRQ(ierr); 407 for (jj=0; jj<ndata_cols; jj++) { 408 for (ii=0; ii<ncrs_prim_new; ii++) { 409 for (kk=0; kk<ndata_rows; kk++) { 410 PetscInt ix = ii*ndata_rows + kk + jj*strideNew, jx = ii*node_data_sz + kk*ndata_cols + jj; 411 pc_gamg->data[ix] = PetscRealPart(array[jx]); 412 } 413 } 414 } 415 ierr = VecRestoreArray(dest_crd, &array);CHKERRQ(ierr); 416 ierr = VecDestroy(&dest_crd);CHKERRQ(ierr); 417 418 /* move A and P (columns) with new layout */ 419 #if defined PETSC_GAMG_USE_LOG 420 ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET13],0,0,0,0);CHKERRQ(ierr); 421 #endif 422 423 /* 424 Invert for MatGetSubMatrix 425 */ 426 ierr = ISInvertPermutation(is_eq_num, ncrs_eq_new, &new_eq_indices);CHKERRQ(ierr); 427 ierr = ISSort(new_eq_indices);CHKERRQ(ierr); /* is this needed? 
*/ 428 ierr = ISSetBlockSize(new_eq_indices, cr_bs);CHKERRQ(ierr); 429 if (is_eq_num != is_eq_num_prim) { 430 ierr = ISDestroy(&is_eq_num_prim);CHKERRQ(ierr); /* could be same as 'is_eq_num' */ 431 } 432 ierr = ISDestroy(&is_eq_num);CHKERRQ(ierr); 433 #if defined PETSC_GAMG_USE_LOG 434 ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET13],0,0,0,0);CHKERRQ(ierr); 435 ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET14],0,0,0,0);CHKERRQ(ierr); 436 #endif 437 /* 'a_Amat_crs' output */ 438 { 439 Mat mat; 440 ierr = MatGetSubMatrix(Cmat, new_eq_indices, new_eq_indices, MAT_INITIAL_MATRIX, &mat);CHKERRQ(ierr); 441 *a_Amat_crs = mat; 442 443 if (!PETSC_TRUE) { 444 PetscInt cbs, rbs; 445 ierr = MatGetBlockSizes(Cmat, &rbs, &cbs);CHKERRQ(ierr); 446 ierr = PetscPrintf(MPI_COMM_SELF,"[%d]%s Old Mat rbs=%d cbs=%d\n",rank,__FUNCT__,rbs,cbs);CHKERRQ(ierr); 447 ierr = MatGetBlockSizes(mat, &rbs, &cbs);CHKERRQ(ierr); 448 ierr = PetscPrintf(MPI_COMM_SELF,"[%d]%s New Mat rbs=%d cbs=%d cr_bs=%d\n",rank,__FUNCT__,rbs,cbs,cr_bs);CHKERRQ(ierr); 449 } 450 } 451 ierr = MatDestroy(&Cmat);CHKERRQ(ierr); 452 453 #if defined PETSC_GAMG_USE_LOG 454 ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET14],0,0,0,0);CHKERRQ(ierr); 455 #endif 456 /* prolongator */ 457 { 458 IS findices; 459 PetscInt Istart,Iend; 460 Mat Pnew; 461 ierr = MatGetOwnershipRange(Pold, &Istart, &Iend);CHKERRQ(ierr); 462 #if defined PETSC_GAMG_USE_LOG 463 ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET15],0,0,0,0);CHKERRQ(ierr); 464 #endif 465 ierr = ISCreateStride(wcomm,Iend-Istart,Istart,1,&findices);CHKERRQ(ierr); 466 ierr = ISSetBlockSize(findices,f_bs);CHKERRQ(ierr); 467 ierr = MatGetSubMatrix(Pold, findices, new_eq_indices, MAT_INITIAL_MATRIX, &Pnew);CHKERRQ(ierr); 468 ierr = ISDestroy(&findices);CHKERRQ(ierr); 469 470 if (!PETSC_TRUE) { 471 PetscInt cbs, rbs; 472 ierr = MatGetBlockSizes(Pold, &rbs, &cbs);CHKERRQ(ierr); 473 ierr = PetscPrintf(MPI_COMM_SELF,"[%d]%s Pold rbs=%d cbs=%d\n",rank,__FUNCT__,rbs,cbs);CHKERRQ(ierr); 474 ierr = MatGetBlockSizes(Pnew, &rbs, &cbs);CHKERRQ(ierr); 475 ierr = PetscPrintf(MPI_COMM_SELF,"[%d]%s Pnew rbs=%d cbs=%d\n",rank,__FUNCT__,rbs,cbs);CHKERRQ(ierr); 476 } 477 #if defined PETSC_GAMG_USE_LOG 478 ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET15],0,0,0,0);CHKERRQ(ierr); 479 #endif 480 ierr = MatDestroy(a_P_inout);CHKERRQ(ierr); 481 482 /* output - repartitioned */ 483 *a_P_inout = Pnew; 484 } 485 ierr = ISDestroy(&new_eq_indices);CHKERRQ(ierr); 486 487 *a_nactive_proc = new_size; /* output */ 488 } 489 490 /* outout matrix data */ 491 if (!PETSC_TRUE) { 492 PetscViewer viewer; char fname[32]; static int llev=0; Cmat = *a_Amat_crs; 493 if (llev==0) { 494 sprintf(fname,"Cmat_%d.m",llev++); 495 PetscViewerASCIIOpen(wcomm,fname,&viewer); 496 ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_MATLAB);CHKERRQ(ierr); 497 ierr = MatView(Amat_fine, viewer);CHKERRQ(ierr); 498 ierr = PetscViewerDestroy(&viewer); 499 } 500 sprintf(fname,"Cmat_%d.m",llev++); 501 PetscViewerASCIIOpen(wcomm,fname,&viewer); 502 ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_MATLAB);CHKERRQ(ierr); 503 ierr = MatView(Cmat, viewer);CHKERRQ(ierr); 504 ierr = PetscViewerDestroy(&viewer); 505 } 506 PetscFunctionReturn(0); 507 } 508 509 /* -------------------------------------------------------------------------- */ 510 /* 511 PCSetUp_GAMG - Prepares for the use of the GAMG preconditioner 512 by setting data structures and options. 513 514 Input Parameter: 515 . 
pc - the preconditioner context 516 517 Application Interface Routine: PCSetUp() 518 519 Notes: 520 The interface routine PCSetUp() is not usually called directly by 521 the user, but instead is called by PCApply() if necessary. 522 */ 523 #undef __FUNCT__ 524 #define __FUNCT__ "PCSetUp_GAMG" 525 PetscErrorCode PCSetUp_GAMG(PC pc) 526 { 527 PetscErrorCode ierr; 528 PC_MG *mg = (PC_MG*)pc->data; 529 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 530 Mat Pmat = pc->pmat; 531 PetscInt fine_level,level,level1,bs,M,qq,lidx,nASMBlocksArr[GAMG_MAXLEVELS]; 532 MPI_Comm wcomm; 533 PetscMPIInt rank,size,nactivepe; 534 Mat Aarr[GAMG_MAXLEVELS],Parr[GAMG_MAXLEVELS]; 535 PetscReal emaxs[GAMG_MAXLEVELS]; 536 IS *ASMLocalIDsArr[GAMG_MAXLEVELS]; 537 GAMGKKTMat kktMatsArr[GAMG_MAXLEVELS]; 538 PetscLogDouble nnz0=0.,nnztot=0.; 539 MatInfo info; 540 PetscBool stokes = PETSC_FALSE, redo_mesh_setup = (PetscBool)(!pc_gamg->reuse_prol); 541 542 PetscFunctionBegin; 543 ierr = PetscObjectGetComm((PetscObject)pc,&wcomm);CHKERRQ(ierr); 544 ierr = MPI_Comm_rank(wcomm,&rank);CHKERRQ(ierr); 545 ierr = MPI_Comm_size(wcomm,&size);CHKERRQ(ierr); 546 547 if (pc_gamg->verbose>2) PetscPrintf(wcomm,"[%d]%s pc_gamg->setup_count=%d pc->setupcalled=%d\n",rank,__FUNCT__,pc_gamg->setup_count,pc->setupcalled); 548 549 if (pc_gamg->setup_count++ > 0) { 550 if (redo_mesh_setup) { 551 /* reset everything */ 552 ierr = PCReset_MG(pc);CHKERRQ(ierr); 553 pc->setupcalled = 0; 554 } else { 555 PC_MG_Levels **mglevels = mg->levels; 556 /* just do Galerkin grids */ 557 Mat B,dA,dB; 558 559 if (!pc->setupcalled) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"PCSetUp() has not been called yet"); 560 if (pc_gamg->Nlevels > 1) { 561 /* currently only handle case where mat and pmat are the same on coarser levels */ 562 ierr = KSPGetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,&dA,&dB,NULL);CHKERRQ(ierr); 563 /* (re)set to get dirty flag */ 564 ierr = KSPSetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,dA,dB,SAME_NONZERO_PATTERN);CHKERRQ(ierr); 565 566 for (level=pc_gamg->Nlevels-2; level>-1; level--) { 567 /* the first time through the matrix structure has changed from repartitioning */ 568 if (pc_gamg->setup_count==2 /*&& (pc_gamg->repart || level==0)*/) { 569 ierr = MatPtAP(dB,mglevels[level+1]->interpolate,MAT_INITIAL_MATRIX,1.0,&B);CHKERRQ(ierr); 570 ierr = MatDestroy(&mglevels[level]->A);CHKERRQ(ierr); 571 572 mglevels[level]->A = B; 573 } else { 574 ierr = KSPGetOperators(mglevels[level]->smoothd,NULL,&B,NULL);CHKERRQ(ierr); 575 ierr = MatPtAP(dB,mglevels[level+1]->interpolate,MAT_REUSE_MATRIX,1.0,&B);CHKERRQ(ierr); 576 } 577 ierr = KSPSetOperators(mglevels[level]->smoothd,B,B,SAME_NONZERO_PATTERN);CHKERRQ(ierr); 578 dB = B; 579 } 580 } 581 582 ierr = PCSetUp_MG(pc);CHKERRQ(ierr); 583 584 /* PCSetUp_MG seems to insists on setting this to GMRES */ 585 ierr = KSPSetType(mglevels[0]->smoothd, KSPPREONLY);CHKERRQ(ierr); 586 PetscFunctionReturn(0); 587 } 588 } 589 590 ierr = PetscOptionsGetBool(((PetscObject)pc)->prefix,"-pc_fieldsplit_detect_saddle_point",&stokes,NULL);CHKERRQ(ierr); 591 592 ierr = GAMGKKTMatCreate(Pmat, stokes, &kktMatsArr[0]);CHKERRQ(ierr); 593 594 if (!pc_gamg->data) { 595 if (pc_gamg->orig_data) { 596 ierr = MatGetBlockSize(Pmat, &bs);CHKERRQ(ierr); 597 ierr = MatGetLocalSize(Pmat, &qq, NULL);CHKERRQ(ierr); 598 599 pc_gamg->data_sz = (qq/bs)*pc_gamg->orig_data_cell_rows*pc_gamg->orig_data_cell_cols; 600 pc_gamg->data_cell_rows = pc_gamg->orig_data_cell_rows; 601 pc_gamg->data_cell_cols = pc_gamg->orig_data_cell_cols; 602 603 ierr 
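/* The 'data' used to build the prolongator (typically nodal coordinates, or a
   near-null-space basis for aggregation) normally comes from the user before the
   first setup.  A minimal sketch, assuming a KSP 'ksp', an operator 'A', the spatial
   dimension 'dim', the local node count 'nloc' and a coordinate array 'coords'
   exist elsewhere (none of them are defined in this file):

       KSPGetPC(ksp, &pc);
       PCSetType(pc, PCGAMG);
       PCSetCoordinates(pc, dim, nloc, coords);
       KSPSetOperators(ksp, A, A, SAME_NONZERO_PATTERN);

   If no data was provided, the type-specific 'createdefaultdata' hook below builds a
   default data set (and its absence is an error for the Stokes path). */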
= PetscMalloc(pc_gamg->data_sz*sizeof(PetscReal), &pc_gamg->data);CHKERRQ(ierr); 604 for (qq=0; qq<pc_gamg->data_sz; qq++) pc_gamg->data[qq] = pc_gamg->orig_data[qq]; 605 } else { 606 if (!pc_gamg->createdefaultdata) SETERRQ(wcomm,PETSC_ERR_PLIB,"'createdefaultdata' not set(?) need to support NULL data"); 607 if (stokes) SETERRQ(wcomm,PETSC_ERR_PLIB,"Need data (eg, PCSetCoordinates) for Stokes problems"); 608 ierr = pc_gamg->createdefaultdata(pc, kktMatsArr[0].A11);CHKERRQ(ierr); 609 } 610 } 611 612 /* cache original data for reuse */ 613 if (!pc_gamg->orig_data && redo_mesh_setup) { 614 ierr = PetscMalloc(pc_gamg->data_sz*sizeof(PetscReal), &pc_gamg->orig_data);CHKERRQ(ierr); 615 for (qq=0; qq<pc_gamg->data_sz; qq++) pc_gamg->orig_data[qq] = pc_gamg->data[qq]; 616 pc_gamg->orig_data_cell_rows = pc_gamg->data_cell_rows; 617 pc_gamg->orig_data_cell_cols = pc_gamg->data_cell_cols; 618 } 619 620 /* get basic dims */ 621 if (stokes) bs = pc_gamg->data_cell_rows; /* this is agg-mg specific */ 622 else { 623 ierr = MatGetBlockSize(Pmat, &bs);CHKERRQ(ierr); 624 } 625 626 ierr = MatGetSize(Pmat, &M, &qq);CHKERRQ(ierr); 627 if (pc_gamg->verbose) { 628 PetscInt NN = M; 629 if (pc_gamg->verbose==1) { 630 ierr = MatGetInfo(Pmat,MAT_LOCAL,&info);CHKERRQ(ierr); 631 ierr = MatGetLocalSize(Pmat, &NN, &qq);CHKERRQ(ierr); 632 } else { 633 ierr = MatGetInfo(Pmat,MAT_GLOBAL_SUM,&info);CHKERRQ(ierr); 634 } 635 nnz0 = info.nz_used; 636 nnztot = info.nz_used; 637 ierr = PetscPrintf(wcomm,"\t[%d]%s level %d N=%d, n data rows=%d, n data cols=%d, nnz/row (ave)=%d, np=%d\n", 638 rank,__FUNCT__,0,M,pc_gamg->data_cell_rows,pc_gamg->data_cell_cols, 639 (int)(nnz0/(PetscReal)NN),size);CHKERRQ(ierr); 640 } 641 642 /* Get A_i and R_i */ 643 for (level=0, Aarr[0]=Pmat, nactivepe = size; /* hard wired stopping logic */ 644 level < (pc_gamg->Nlevels-1) && (level==0 || M>pc_gamg->coarse_eq_limit); /* && (size==1 || nactivepe>1); */ 645 level++) { 646 level1 = level + 1; 647 #if defined PETSC_GAMG_USE_LOG 648 ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET1],0,0,0,0);CHKERRQ(ierr); 649 #if (defined GAMG_STAGES) 650 ierr = PetscLogStagePush(gamg_stages[level]);CHKERRQ(ierr); 651 #endif 652 #endif 653 /* deal with Stokes, get sub matrices */ 654 if (level > 0) { 655 ierr = GAMGKKTMatCreate(Aarr[level], stokes, &kktMatsArr[level]);CHKERRQ(ierr); 656 } 657 { /* construct prolongator */ 658 Mat Gmat; 659 PetscCoarsenData *agg_lists; 660 Mat Prol11,Prol22; 661 662 ierr = pc_gamg->graph(pc,kktMatsArr[level].A11, &Gmat);CHKERRQ(ierr); 663 ierr = pc_gamg->coarsen(pc, &Gmat, &agg_lists);CHKERRQ(ierr); 664 ierr = pc_gamg->prolongator(pc, kktMatsArr[level].A11, Gmat, agg_lists, &Prol11);CHKERRQ(ierr); 665 666 /* could have failed to create new level */ 667 if (Prol11) { 668 /* get new block size of coarse matrices */ 669 ierr = MatGetBlockSizes(Prol11, NULL, &bs);CHKERRQ(ierr); 670 671 if (stokes) { 672 if (!pc_gamg->formkktprol) SETERRQ(wcomm,PETSC_ERR_USER,"Stokes not supportd by AMG method."); 673 /* R A12 == (T = A21 P)'; G = T' T; coarsen G; form plain agg with G */ 674 ierr = pc_gamg->formkktprol(pc, Prol11, kktMatsArr[level].A21, &Prol22);CHKERRQ(ierr); 675 } 676 677 if (pc_gamg->optprol) { 678 /* smooth */ 679 ierr = pc_gamg->optprol(pc, kktMatsArr[level].A11, &Prol11);CHKERRQ(ierr); 680 } 681 682 if (stokes) { 683 IS is_row[2]; 684 Mat a[4]; 685 686 is_row[0] = kktMatsArr[level].prim_is; is_row[1] = kktMatsArr[level].constr_is; 687 a[0] = Prol11; a[1] = NULL; a[2] = NULL; a[3] = Prol22; 688 ierr = 
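/* For the Stokes/KKT path the level prolongator is a 2x2 MatNest with no coupling
   between the primal and constraint fields,

       P = [ Prol11    0    ]
           [   0     Prol22 ]

   i.e. a[0] = Prol11, a[3] = Prol22, and the off-diagonal blocks are passed as NULL. */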
MatCreateNest(wcomm,2,is_row, 2, is_row, a, &Parr[level1]);CHKERRQ(ierr); 689 } else Parr[level1] = Prol11; 690 } else Parr[level1] = NULL; 691 692 if (pc_gamg->use_aggs_in_gasm) { 693 ierr = PetscCDGetASMBlocks(agg_lists, bs, &nASMBlocksArr[level], &ASMLocalIDsArr[level]);CHKERRQ(ierr); 694 } 695 696 ierr = MatDestroy(&Gmat);CHKERRQ(ierr); 697 ierr = PetscCDDestroy(agg_lists);CHKERRQ(ierr); 698 } /* construct prolongator scope */ 699 #if defined PETSC_GAMG_USE_LOG 700 ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET1],0,0,0,0);CHKERRQ(ierr); 701 #endif 702 /* cache eigen estimate */ 703 if (pc_gamg->emax_id != -1) { 704 PetscBool flag; 705 ierr = PetscObjectComposedDataGetReal((PetscObject)kktMatsArr[level].A11, pc_gamg->emax_id, emaxs[level], flag);CHKERRQ(ierr); 706 if (!flag) emaxs[level] = -1.; 707 } else emaxs[level] = -1.; 708 if (level==0) Aarr[0] = Pmat; /* use Pmat for finest level setup */ 709 if (!Parr[level1]) { 710 if (pc_gamg->verbose) { 711 ierr = PetscPrintf(wcomm,"\t[%d]%s stop gridding, level %d\n",rank,__FUNCT__,level);CHKERRQ(ierr); 712 } 713 break; 714 } 715 #if defined PETSC_GAMG_USE_LOG 716 ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET2],0,0,0,0);CHKERRQ(ierr); 717 #endif 718 719 ierr = createLevel(pc, Aarr[level], bs, (PetscBool)(level==pc_gamg->Nlevels-2), 720 stokes, &Parr[level1], &Aarr[level1], &nactivepe);CHKERRQ(ierr); 721 722 #if defined PETSC_GAMG_USE_LOG 723 ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET2],0,0,0,0);CHKERRQ(ierr); 724 #endif 725 ierr = MatGetSize(Aarr[level1], &M, &qq);CHKERRQ(ierr); 726 727 if (pc_gamg->verbose > 0) { 728 PetscInt NN = M; 729 if (pc_gamg->verbose==1) { 730 ierr = MatGetInfo(Aarr[level1],MAT_LOCAL,&info);CHKERRQ(ierr); 731 ierr = MatGetLocalSize(Aarr[level1], &NN, &qq);CHKERRQ(ierr); 732 } else { 733 ierr = MatGetInfo(Aarr[level1], MAT_GLOBAL_SUM, &info);CHKERRQ(ierr); 734 } 735 736 nnztot += info.nz_used; 737 ierr = PetscPrintf(wcomm,"\t\t[%d]%s %d) N=%d, n data cols=%d, nnz/row (ave)=%d, %d active pes\n", 738 rank,__FUNCT__,(int)level1,M,pc_gamg->data_cell_cols, 739 (int)(info.nz_used/(PetscReal)NN), nactivepe);CHKERRQ(ierr); 740 } 741 742 /* stop if one node -- could pull back for singular problems */ 743 if (M/pc_gamg->data_cell_cols < 2) { 744 level++; 745 break; 746 } 747 #if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES) 748 ierr = PetscLogStagePop();CHKERRQ(ierr); 749 #endif 750 } /* levels */ 751 752 if (pc_gamg->data) { 753 ierr = PetscFree(pc_gamg->data);CHKERRQ(ierr); 754 pc_gamg->data = NULL; 755 } 756 757 if (pc_gamg->verbose) PetscPrintf(wcomm,"\t[%d]%s %d levels, grid complexity = %g\n",0,__FUNCT__,level+1,nnztot/nnz0); 758 pc_gamg->Nlevels = level + 1; 759 fine_level = level; 760 ierr = PCMGSetLevels(pc,pc_gamg->Nlevels,NULL);CHKERRQ(ierr); 761 762 /* simple setup */ 763 if (!PETSC_TRUE) { 764 PC_MG_Levels **mglevels = mg->levels; 765 for (lidx=0,level=pc_gamg->Nlevels-1; 766 lidx<fine_level; 767 lidx++, level--) { 768 ierr = PCMGSetInterpolation(pc, lidx+1, Parr[level]);CHKERRQ(ierr); 769 ierr = KSPSetOperators(mglevels[lidx]->smoothd, Aarr[level], Aarr[level], SAME_NONZERO_PATTERN);CHKERRQ(ierr); 770 ierr = MatDestroy(&Parr[level]);CHKERRQ(ierr); 771 ierr = MatDestroy(&Aarr[level]);CHKERRQ(ierr); 772 } 773 ierr = KSPSetOperators(mglevels[fine_level]->smoothd, Aarr[0], Aarr[0], SAME_NONZERO_PATTERN);CHKERRQ(ierr); 774 775 ierr = PCSetUp_MG(pc);CHKERRQ(ierr); 776 } else if (pc_gamg->Nlevels > 1) { /* don't setup MG if one level */ 777 /* set default smoothers & set operators */ 778 for 
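/* This loop installs one smoother per level: the level operator and prolongator
   computed above are attached, and the defaults are Chebyshev (KSPCHEBYSHEV) with a
   point-Jacobi sub-PC, or PCGASM built from the aggregation aggregates when
   -pc_gamg_use_agg_gasm is set.  Each level smoother is an ordinary PCMG KSP, so it
   can also be customised through the usual -mg_levels_ options prefix, although the
   code below applies some of these settings after the options have been read. */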
(lidx = 1, level = pc_gamg->Nlevels-2; 779 lidx <= fine_level; 780 lidx++, level--) { 781 KSP smoother; 782 PC subpc; 783 784 ierr = PCMGGetSmoother(pc, lidx, &smoother);CHKERRQ(ierr); 785 ierr = KSPGetPC(smoother, &subpc);CHKERRQ(ierr); 786 787 ierr = KSPSetNormType(smoother, KSP_NORM_NONE);CHKERRQ(ierr); 788 /* set ops */ 789 ierr = KSPSetOperators(smoother, Aarr[level], Aarr[level], SAME_NONZERO_PATTERN);CHKERRQ(ierr); 790 ierr = PCMGSetInterpolation(pc, lidx, Parr[level+1]);CHKERRQ(ierr); 791 792 /* create field split PC, get subsmoother */ 793 if (stokes) { 794 KSP *ksps; 795 PetscInt nn; 796 ierr = PCFieldSplitSetIS(subpc,"0",kktMatsArr[level].prim_is);CHKERRQ(ierr); 797 ierr = PCFieldSplitSetIS(subpc,"1",kktMatsArr[level].constr_is);CHKERRQ(ierr); 798 ierr = PCFieldSplitGetSubKSP(subpc,&nn,&ksps);CHKERRQ(ierr); 799 smoother = ksps[0]; 800 ierr = KSPGetPC(smoother, &subpc);CHKERRQ(ierr); 801 ierr = PetscFree(ksps);CHKERRQ(ierr); 802 } 803 ierr = GAMGKKTMatDestroy(&kktMatsArr[level]);CHKERRQ(ierr); 804 805 /* set defaults */ 806 ierr = KSPSetType(smoother, KSPCHEBYSHEV);CHKERRQ(ierr); 807 808 /* override defaults and command line args (!) */ 809 if (pc_gamg->use_aggs_in_gasm) { 810 PetscInt sz; 811 IS *is; 812 813 sz = nASMBlocksArr[level]; 814 is = ASMLocalIDsArr[level]; 815 ierr = PCSetType(subpc, PCGASM);CHKERRQ(ierr); 816 if (sz==0) { 817 IS is; 818 PetscInt my0,kk; 819 ierr = MatGetOwnershipRange(Aarr[level], &my0, &kk);CHKERRQ(ierr); 820 ierr = ISCreateGeneral(PETSC_COMM_SELF, 1, &my0, PETSC_COPY_VALUES, &is);CHKERRQ(ierr); 821 ierr = PCGASMSetSubdomains(subpc, 1, &is, NULL);CHKERRQ(ierr); 822 ierr = ISDestroy(&is);CHKERRQ(ierr); 823 } else { 824 PetscInt kk; 825 ierr = PCGASMSetSubdomains(subpc, sz, is, NULL);CHKERRQ(ierr); 826 for (kk=0; kk<sz; kk++) { 827 ierr = ISDestroy(&is[kk]);CHKERRQ(ierr); 828 } 829 ierr = PetscFree(is);CHKERRQ(ierr); 830 } 831 ierr = PCGASMSetOverlap(subpc, 0);CHKERRQ(ierr); 832 833 ASMLocalIDsArr[level] = NULL; 834 nASMBlocksArr[level] = 0; 835 ierr = PCGASMSetType(subpc, PC_GASM_BASIC);CHKERRQ(ierr); 836 } else { 837 ierr = PCSetType(subpc, PCJACOBI);CHKERRQ(ierr); 838 } 839 } 840 { 841 /* coarse grid */ 842 KSP smoother,*k2; PC subpc,pc2; PetscInt ii,first; 843 Mat Lmat = Aarr[(level=pc_gamg->Nlevels-1)]; lidx = 0; 844 ierr = PCMGGetSmoother(pc, lidx, &smoother);CHKERRQ(ierr); 845 ierr = KSPSetOperators(smoother, Lmat, Lmat, SAME_NONZERO_PATTERN);CHKERRQ(ierr); 846 ierr = KSPSetNormType(smoother, KSP_NORM_NONE);CHKERRQ(ierr); 847 ierr = KSPGetPC(smoother, &subpc);CHKERRQ(ierr); 848 ierr = PCSetType(subpc, PCBJACOBI);CHKERRQ(ierr); 849 ierr = PCSetUp(subpc);CHKERRQ(ierr); 850 ierr = PCBJacobiGetSubKSP(subpc,&ii,&first,&k2);CHKERRQ(ierr); 851 if (ii != 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"ii %D is not one",ii); 852 ierr = KSPGetPC(k2[0],&pc2);CHKERRQ(ierr); 853 ierr = PCSetType(pc2, PCLU);CHKERRQ(ierr); 854 ierr = PCFactorSetShiftType(pc2,MAT_SHIFT_INBLOCKS);CHKERRQ(ierr); 855 } 856 857 /* should be called in PCSetFromOptions_GAMG(), but cannot be called prior to PCMGSetLevels() */ 858 ierr = PetscObjectOptionsBegin((PetscObject)pc);CHKERRQ(ierr); 859 ierr = PCSetFromOptions_MG(pc);CHKERRQ(ierr); 860 ierr = PetscOptionsEnd();CHKERRQ(ierr); 861 if (mg->galerkin != 2) SETERRQ(wcomm,PETSC_ERR_USER,"GAMG does Galerkin manually so the -pc_mg_galerkin option must not be used."); 862 863 /* create cheby smoothers */ 864 for (lidx = 1, level = pc_gamg->Nlevels-2; 865 lidx <= fine_level; 866 lidx++, level--) { 867 KSP smoother; 868 PetscBool flag; 869 
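    /* Each Chebyshev smoother needs an estimate of the largest eigenvalue of the
       (preconditioned) level operator.  If the prolongator setup cached one (emaxs[],
       via 'emax_id') and the sub-PC is Jacobi, that value is reused; otherwise a short
       estimation solve is run below (10 iterations, KSPSetComputeSingularValues) under
       the options prefix "gamg_est_", so e.g. -gamg_est_ksp_type cg can change the
       estimator (assuming the outer PC has no prefix of its own).  The resulting bounds
       are scaled by pc_gamg->eigtarget[] before KSPChebyshevSetEigenvalues(). */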
PC subpc; 870 871 ierr = PCMGGetSmoother(pc, lidx, &smoother);CHKERRQ(ierr); 872 ierr = KSPGetPC(smoother, &subpc);CHKERRQ(ierr); 873 874 /* create field split PC, get subsmoother */ 875 if (stokes) { 876 KSP *ksps; 877 PetscInt nn; 878 ierr = PCFieldSplitGetSubKSP(subpc,&nn,&ksps);CHKERRQ(ierr); 879 smoother = ksps[0]; 880 ierr = KSPGetPC(smoother, &subpc);CHKERRQ(ierr); 881 ierr = PetscFree(ksps);CHKERRQ(ierr); 882 } 883 884 /* do my own cheby */ 885 ierr = PetscObjectTypeCompare((PetscObject)smoother, KSPCHEBYSHEV, &flag);CHKERRQ(ierr); 886 if (flag) { 887 PetscReal emax, emin; 888 ierr = PetscObjectTypeCompare((PetscObject)subpc, PCJACOBI, &flag);CHKERRQ(ierr); 889 if (flag && emaxs[level] > 0.0) emax=emaxs[level]; /* eigen estimate only for diagnal PC */ 890 else { /* eigen estimate 'emax' */ 891 KSP eksp; 892 Mat Lmat = Aarr[level]; 893 Vec bb, xx; 894 895 ierr = MatGetVecs(Lmat, &bb, 0);CHKERRQ(ierr); 896 ierr = MatGetVecs(Lmat, &xx, 0);CHKERRQ(ierr); 897 { 898 PetscRandom rctx; 899 ierr = PetscRandomCreate(wcomm,&rctx);CHKERRQ(ierr); 900 ierr = PetscRandomSetFromOptions(rctx);CHKERRQ(ierr); 901 ierr = VecSetRandom(bb,rctx);CHKERRQ(ierr); 902 ierr = PetscRandomDestroy(&rctx);CHKERRQ(ierr); 903 } 904 905 /* zeroing out BC rows -- needed for crazy matrices */ 906 { 907 PetscInt Istart,Iend,ncols,jj,Ii; 908 PetscScalar zero = 0.0; 909 ierr = MatGetOwnershipRange(Lmat, &Istart, &Iend);CHKERRQ(ierr); 910 for (Ii = Istart, jj = 0; Ii < Iend; Ii++, jj++) { 911 ierr = MatGetRow(Lmat,Ii,&ncols,0,0);CHKERRQ(ierr); 912 if (ncols <= 1) { 913 ierr = VecSetValues(bb, 1, &Ii, &zero, INSERT_VALUES);CHKERRQ(ierr); 914 } 915 ierr = MatRestoreRow(Lmat,Ii,&ncols,0,0);CHKERRQ(ierr); 916 } 917 ierr = VecAssemblyBegin(bb);CHKERRQ(ierr); 918 ierr = VecAssemblyEnd(bb);CHKERRQ(ierr); 919 } 920 921 ierr = KSPCreate(wcomm, &eksp);CHKERRQ(ierr); 922 ierr = KSPSetTolerances(eksp, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT, 10);CHKERRQ(ierr); 923 ierr = KSPSetNormType(eksp, KSP_NORM_NONE);CHKERRQ(ierr); 924 ierr = KSPSetOptionsPrefix(eksp,((PetscObject)pc)->prefix);CHKERRQ(ierr); 925 ierr = KSPAppendOptionsPrefix(eksp, "gamg_est_");CHKERRQ(ierr); 926 ierr = KSPSetFromOptions(eksp);CHKERRQ(ierr); 927 928 ierr = KSPSetInitialGuessNonzero(eksp, PETSC_FALSE);CHKERRQ(ierr); 929 ierr = KSPSetOperators(eksp, Lmat, Lmat, SAME_NONZERO_PATTERN);CHKERRQ(ierr); 930 ierr = KSPSetComputeSingularValues(eksp,PETSC_TRUE);CHKERRQ(ierr); 931 932 /* set PC type to be same as smoother */ 933 ierr = KSPSetPC(eksp, subpc);CHKERRQ(ierr); 934 935 /* solve - keep stuff out of logging */ 936 ierr = PetscLogEventDeactivate(KSP_Solve);CHKERRQ(ierr); 937 ierr = PetscLogEventDeactivate(PC_Apply);CHKERRQ(ierr); 938 ierr = KSPSolve(eksp, bb, xx);CHKERRQ(ierr); 939 ierr = PetscLogEventActivate(KSP_Solve);CHKERRQ(ierr); 940 ierr = PetscLogEventActivate(PC_Apply);CHKERRQ(ierr); 941 942 ierr = KSPComputeExtremeSingularValues(eksp, &emax, &emin);CHKERRQ(ierr); 943 944 ierr = VecDestroy(&xx);CHKERRQ(ierr); 945 ierr = VecDestroy(&bb);CHKERRQ(ierr); 946 ierr = KSPDestroy(&eksp);CHKERRQ(ierr); 947 948 if (pc_gamg->verbose > 0) { 949 PetscInt N1, tt; 950 ierr = MatGetSize(Aarr[level], &N1, &tt);CHKERRQ(ierr); 951 PetscPrintf(wcomm,"\t\t\t%s PC setup max eigen=%e min=%e on level %d (N=%d)\n",__FUNCT__,emax,emin,lidx,N1); 952 } 953 } 954 { 955 PetscInt N1, N0; 956 ierr = MatGetSize(Aarr[level], &N1, NULL);CHKERRQ(ierr); 957 ierr = MatGetSize(Aarr[level+1], &N0, NULL);CHKERRQ(ierr); 958 /* heuristic - is this crap? 
*/ 959 /* emin = 1.*emax/((PetscReal)N1/(PetscReal)N0); */ 960 emin = emax * pc_gamg->eigtarget[0]; 961 emax *= pc_gamg->eigtarget[1]; 962 } 963 ierr = KSPChebyshevSetEigenvalues(smoother, emax, emin);CHKERRQ(ierr); 964 } /* setup checby flag */ 965 } /* non-coarse levels */ 966 967 /* clean up */ 968 for (level=1; level<pc_gamg->Nlevels; level++) { 969 ierr = MatDestroy(&Parr[level]);CHKERRQ(ierr); 970 ierr = MatDestroy(&Aarr[level]);CHKERRQ(ierr); 971 } 972 973 ierr = PCSetUp_MG(pc);CHKERRQ(ierr); 974 975 if (PETSC_TRUE) { 976 KSP smoother; /* PCSetUp_MG seems to insists on setting this to GMRES on coarse grid */ 977 ierr = PCMGGetSmoother(pc, 0, &smoother);CHKERRQ(ierr); 978 ierr = KSPSetType(smoother, KSPPREONLY);CHKERRQ(ierr); 979 } 980 } else { 981 KSP smoother; 982 if (pc_gamg->verbose) PetscPrintf(wcomm,"\t[%d]%s one level solver used (system is seen as DD). Using default solver.\n",rank,__FUNCT__); 983 ierr = PCMGGetSmoother(pc, 0, &smoother);CHKERRQ(ierr); 984 ierr = KSPSetOperators(smoother, Aarr[0], Aarr[0], SAME_NONZERO_PATTERN);CHKERRQ(ierr); 985 ierr = KSPSetType(smoother, KSPPREONLY);CHKERRQ(ierr); 986 ierr = PCSetUp_MG(pc);CHKERRQ(ierr); 987 } 988 PetscFunctionReturn(0); 989 } 990 991 /* ------------------------------------------------------------------------- */ 992 /* 993 PCDestroy_GAMG - Destroys the private context for the GAMG preconditioner 994 that was created with PCCreate_GAMG(). 995 996 Input Parameter: 997 . pc - the preconditioner context 998 999 Application Interface Routine: PCDestroy() 1000 */ 1001 #undef __FUNCT__ 1002 #define __FUNCT__ "PCDestroy_GAMG" 1003 PetscErrorCode PCDestroy_GAMG(PC pc) 1004 { 1005 PetscErrorCode ierr; 1006 PC_MG *mg = (PC_MG*)pc->data; 1007 PC_GAMG *pc_gamg= (PC_GAMG*)mg->innerctx; 1008 1009 PetscFunctionBegin; 1010 ierr = PCReset_GAMG(pc);CHKERRQ(ierr); 1011 ierr = PetscFree(pc_gamg);CHKERRQ(ierr); 1012 ierr = PCDestroy_MG(pc);CHKERRQ(ierr); 1013 PetscFunctionReturn(0); 1014 } 1015 1016 1017 #undef __FUNCT__ 1018 #define __FUNCT__ "PCGAMGSetProcEqLim" 1019 /*@ 1020 PCGAMGSetProcEqLim - Set number of equations to aim for on coarse grids via 1021 processor reduction. 1022 1023 Not Collective on PC 1024 1025 Input Parameters: 1026 . pc - the preconditioner context 1027 1028 1029 Options Database Key: 1030 . -pc_gamg_process_eq_limit 1031 1032 Level: intermediate 1033 1034 Concepts: Unstructured multrigrid preconditioner 1035 1036 .seealso: () 1037 @*/ 1038 PetscErrorCode PCGAMGSetProcEqLim(PC pc, PetscInt n) 1039 { 1040 PetscErrorCode ierr; 1041 1042 PetscFunctionBegin; 1043 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1044 ierr = PetscTryMethod(pc,"PCGAMGSetProcEqLim_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr); 1045 PetscFunctionReturn(0); 1046 } 1047 1048 EXTERN_C_BEGIN 1049 #undef __FUNCT__ 1050 #define __FUNCT__ "PCGAMGSetProcEqLim_GAMG" 1051 PetscErrorCode PCGAMGSetProcEqLim_GAMG(PC pc, PetscInt n) 1052 { 1053 PC_MG *mg = (PC_MG*)pc->data; 1054 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 1055 1056 PetscFunctionBegin; 1057 if (n>0) pc_gamg->min_eq_proc = n; 1058 PetscFunctionReturn(0); 1059 } 1060 EXTERN_C_END 1061 1062 #undef __FUNCT__ 1063 #define __FUNCT__ "PCGAMGSetCoarseEqLim" 1064 /*@ 1065 PCGAMGSetCoarseEqLim - Set max number of equations on coarse grids. 1066 1067 Collective on PC 1068 1069 Input Parameters: 1070 . pc - the preconditioner context 1071 1072 1073 Options Database Key: 1074 . 
-pc_gamg_coarse_eq_limit 1075 1076 Level: intermediate 1077 1078 Concepts: Unstructured multrigrid preconditioner 1079 1080 .seealso: () 1081 @*/ 1082 PetscErrorCode PCGAMGSetCoarseEqLim(PC pc, PetscInt n) 1083 { 1084 PetscErrorCode ierr; 1085 1086 PetscFunctionBegin; 1087 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1088 ierr = PetscTryMethod(pc,"PCGAMGSetCoarseEqLim_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr); 1089 PetscFunctionReturn(0); 1090 } 1091 1092 EXTERN_C_BEGIN 1093 #undef __FUNCT__ 1094 #define __FUNCT__ "PCGAMGSetCoarseEqLim_GAMG" 1095 PetscErrorCode PCGAMGSetCoarseEqLim_GAMG(PC pc, PetscInt n) 1096 { 1097 PC_MG *mg = (PC_MG*)pc->data; 1098 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 1099 1100 PetscFunctionBegin; 1101 if (n>0) pc_gamg->coarse_eq_limit = n; 1102 PetscFunctionReturn(0); 1103 } 1104 EXTERN_C_END 1105 1106 #undef __FUNCT__ 1107 #define __FUNCT__ "PCGAMGSetRepartitioning" 1108 /*@ 1109 PCGAMGSetRepartitioning - Repartition the coarse grids 1110 1111 Collective on PC 1112 1113 Input Parameters: 1114 . pc - the preconditioner context 1115 1116 1117 Options Database Key: 1118 . -pc_gamg_repartition 1119 1120 Level: intermediate 1121 1122 Concepts: Unstructured multrigrid preconditioner 1123 1124 .seealso: () 1125 @*/ 1126 PetscErrorCode PCGAMGSetRepartitioning(PC pc, PetscBool n) 1127 { 1128 PetscErrorCode ierr; 1129 1130 PetscFunctionBegin; 1131 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1132 ierr = PetscTryMethod(pc,"PCGAMGSetRepartitioning_C",(PC,PetscBool),(pc,n));CHKERRQ(ierr); 1133 PetscFunctionReturn(0); 1134 } 1135 1136 EXTERN_C_BEGIN 1137 #undef __FUNCT__ 1138 #define __FUNCT__ "PCGAMGSetRepartitioning_GAMG" 1139 PetscErrorCode PCGAMGSetRepartitioning_GAMG(PC pc, PetscBool n) 1140 { 1141 PC_MG *mg = (PC_MG*)pc->data; 1142 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 1143 1144 PetscFunctionBegin; 1145 pc_gamg->repart = n; 1146 PetscFunctionReturn(0); 1147 } 1148 EXTERN_C_END 1149 1150 #undef __FUNCT__ 1151 #define __FUNCT__ "PCGAMGSetReuseProl" 1152 /*@ 1153 PCGAMGSetReuseProl - Reuse prlongation 1154 1155 Collective on PC 1156 1157 Input Parameters: 1158 . pc - the preconditioner context 1159 1160 1161 Options Database Key: 1162 . -pc_gamg_reuse_interpolation 1163 1164 Level: intermediate 1165 1166 Concepts: Unstructured multrigrid preconditioner 1167 1168 .seealso: () 1169 @*/ 1170 PetscErrorCode PCGAMGSetReuseProl(PC pc, PetscBool n) 1171 { 1172 PetscErrorCode ierr; 1173 1174 PetscFunctionBegin; 1175 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1176 ierr = PetscTryMethod(pc,"PCGAMGSetReuseProl_C",(PC,PetscBool),(pc,n));CHKERRQ(ierr); 1177 PetscFunctionReturn(0); 1178 } 1179 1180 EXTERN_C_BEGIN 1181 #undef __FUNCT__ 1182 #define __FUNCT__ "PCGAMGSetReuseProl_GAMG" 1183 PetscErrorCode PCGAMGSetReuseProl_GAMG(PC pc, PetscBool n) 1184 { 1185 PC_MG *mg = (PC_MG*)pc->data; 1186 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 1187 1188 PetscFunctionBegin; 1189 pc_gamg->reuse_prol = n; 1190 PetscFunctionReturn(0); 1191 } 1192 EXTERN_C_END 1193 1194 #undef __FUNCT__ 1195 #define __FUNCT__ "PCGAMGSetUseASMAggs" 1196 /*@ 1197 PCGAMGSetUseASMAggs - 1198 1199 Collective on PC 1200 1201 Input Parameters: 1202 . pc - the preconditioner context 1203 1204 1205 Options Database Key: 1206 . 
-pc_gamg_use_agg_gasm 1207 1208 Level: intermediate 1209 1210 Concepts: Unstructured multrigrid preconditioner 1211 1212 .seealso: () 1213 @*/ 1214 PetscErrorCode PCGAMGSetUseASMAggs(PC pc, PetscBool n) 1215 { 1216 PetscErrorCode ierr; 1217 1218 PetscFunctionBegin; 1219 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1220 ierr = PetscTryMethod(pc,"PCGAMGSetUseASMAggs_C",(PC,PetscBool),(pc,n));CHKERRQ(ierr); 1221 PetscFunctionReturn(0); 1222 } 1223 1224 EXTERN_C_BEGIN 1225 #undef __FUNCT__ 1226 #define __FUNCT__ "PCGAMGSetUseASMAggs_GAMG" 1227 PetscErrorCode PCGAMGSetUseASMAggs_GAMG(PC pc, PetscBool n) 1228 { 1229 PC_MG *mg = (PC_MG*)pc->data; 1230 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 1231 1232 PetscFunctionBegin; 1233 pc_gamg->use_aggs_in_gasm = n; 1234 PetscFunctionReturn(0); 1235 } 1236 EXTERN_C_END 1237 1238 #undef __FUNCT__ 1239 #define __FUNCT__ "PCGAMGSetNlevels" 1240 /*@ 1241 PCGAMGSetNlevels - 1242 1243 Not collective on PC 1244 1245 Input Parameters: 1246 . pc - the preconditioner context 1247 1248 1249 Options Database Key: 1250 . -pc_mg_levels 1251 1252 Level: intermediate 1253 1254 Concepts: Unstructured multrigrid preconditioner 1255 1256 .seealso: () 1257 @*/ 1258 PetscErrorCode PCGAMGSetNlevels(PC pc, PetscInt n) 1259 { 1260 PetscErrorCode ierr; 1261 1262 PetscFunctionBegin; 1263 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1264 ierr = PetscTryMethod(pc,"PCGAMGSetNlevels_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr); 1265 PetscFunctionReturn(0); 1266 } 1267 1268 EXTERN_C_BEGIN 1269 #undef __FUNCT__ 1270 #define __FUNCT__ "PCGAMGSetNlevels_GAMG" 1271 PetscErrorCode PCGAMGSetNlevels_GAMG(PC pc, PetscInt n) 1272 { 1273 PC_MG *mg = (PC_MG*)pc->data; 1274 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 1275 1276 PetscFunctionBegin; 1277 pc_gamg->Nlevels = n; 1278 PetscFunctionReturn(0); 1279 } 1280 EXTERN_C_END 1281 1282 #undef __FUNCT__ 1283 #define __FUNCT__ "PCGAMGSetThreshold" 1284 /*@ 1285 PCGAMGSetThreshold - Relative threshold to use for dropping edges in aggregation graph 1286 1287 Not collective on PC 1288 1289 Input Parameters: 1290 . pc - the preconditioner context 1291 1292 1293 Options Database Key: 1294 . -pc_gamg_threshold 1295 1296 Level: intermediate 1297 1298 Concepts: Unstructured multrigrid preconditioner 1299 1300 .seealso: () 1301 @*/ 1302 PetscErrorCode PCGAMGSetThreshold(PC pc, PetscReal n) 1303 { 1304 PetscErrorCode ierr; 1305 1306 PetscFunctionBegin; 1307 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1308 ierr = PetscTryMethod(pc,"PCGAMGSetThreshold_C",(PC,PetscReal),(pc,n));CHKERRQ(ierr); 1309 PetscFunctionReturn(0); 1310 } 1311 1312 EXTERN_C_BEGIN 1313 #undef __FUNCT__ 1314 #define __FUNCT__ "PCGAMGSetThreshold_GAMG" 1315 PetscErrorCode PCGAMGSetThreshold_GAMG(PC pc, PetscReal n) 1316 { 1317 PC_MG *mg = (PC_MG*)pc->data; 1318 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 1319 1320 PetscFunctionBegin; 1321 pc_gamg->threshold = n; 1322 PetscFunctionReturn(0); 1323 } 1324 EXTERN_C_END 1325 1326 #undef __FUNCT__ 1327 #define __FUNCT__ "PCGAMGSetType" 1328 /*@ 1329 PCGAMGSetType - Set solution method - calls sub create method 1330 1331 Collective on PC 1332 1333 Input Parameters: 1334 . pc - the preconditioner context 1335 1336 1337 Options Database Key: 1338 . 
-pc_gamg_type 1339 1340 Level: intermediate 1341 1342 Concepts: Unstructured multrigrid preconditioner 1343 1344 .seealso: () 1345 @*/ 1346 PetscErrorCode PCGAMGSetType(PC pc, PCGAMGType type) 1347 { 1348 PetscErrorCode ierr; 1349 1350 PetscFunctionBegin; 1351 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1352 ierr = PetscTryMethod(pc,"PCGAMGSetType_C",(PC,PCGAMGType),(pc,type));CHKERRQ(ierr); 1353 PetscFunctionReturn(0); 1354 } 1355 1356 EXTERN_C_BEGIN 1357 #undef __FUNCT__ 1358 #define __FUNCT__ "PCGAMGSetType_GAMG" 1359 PetscErrorCode PCGAMGSetType_GAMG(PC pc, PCGAMGType type) 1360 { 1361 PetscErrorCode ierr,(*r)(PC); 1362 1363 PetscFunctionBegin; 1364 ierr = PetscFunctionListFind(PetscObjectComm((PetscObject)pc),GAMGList,type,PETSC_FALSE,(PetscVoidStarFunction)&r);CHKERRQ(ierr); 1365 if (!r) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_UNKNOWN_TYPE,"Unknown GAMG type %s given",type); 1366 ierr = (*r)(pc);CHKERRQ(ierr); 1367 PetscFunctionReturn(0); 1368 } 1369 EXTERN_C_END 1370 1371 #undef __FUNCT__ 1372 #define __FUNCT__ "PCSetFromOptions_GAMG" 1373 PetscErrorCode PCSetFromOptions_GAMG(PC pc) 1374 { 1375 PetscErrorCode ierr; 1376 PC_MG *mg = (PC_MG*)pc->data; 1377 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 1378 PetscBool flag; 1379 PetscInt two = 2; 1380 MPI_Comm wcomm; 1381 1382 PetscFunctionBegin; 1383 ierr = PetscObjectGetComm((PetscObject)pc,&wcomm);CHKERRQ(ierr); 1384 ierr = PetscOptionsHead("GAMG options");CHKERRQ(ierr); 1385 { 1386 /* -pc_gamg_verbose */ 1387 ierr = PetscOptionsInt("-pc_gamg_verbose","Verbose (debugging) output for PCGAMG", 1388 "none", pc_gamg->verbose, 1389 &pc_gamg->verbose, NULL);CHKERRQ(ierr); 1390 /* -pc_gamg_repartition */ 1391 ierr = PetscOptionsBool("-pc_gamg_repartition", 1392 "Repartion coarse grids (false)", 1393 "PCGAMGRepartitioning", 1394 pc_gamg->repart, 1395 &pc_gamg->repart, 1396 &flag);CHKERRQ(ierr); 1397 /* -pc_gamg_reuse_interpolation */ 1398 ierr = PetscOptionsBool("-pc_gamg_reuse_interpolation", 1399 "Reuse prolongation operator (true)", 1400 "PCGAMGReuseProl", 1401 pc_gamg->reuse_prol, 1402 &pc_gamg->reuse_prol, 1403 &flag);CHKERRQ(ierr); 1404 /* -pc_gamg_use_agg_gasm */ 1405 ierr = PetscOptionsBool("-pc_gamg_use_agg_gasm", 1406 "Use aggregation agragates for GASM smoother (false)", 1407 "PCGAMGUseASMAggs", 1408 pc_gamg->use_aggs_in_gasm, 1409 &pc_gamg->use_aggs_in_gasm, 1410 &flag);CHKERRQ(ierr); 1411 /* -pc_gamg_process_eq_limit */ 1412 ierr = PetscOptionsInt("-pc_gamg_process_eq_limit", 1413 "Limit (goal) on number of equations per process on coarse grids", 1414 "PCGAMGSetProcEqLim", 1415 pc_gamg->min_eq_proc, 1416 &pc_gamg->min_eq_proc, 1417 &flag);CHKERRQ(ierr); 1418 /* -pc_gamg_coarse_eq_limit */ 1419 ierr = PetscOptionsInt("-pc_gamg_coarse_eq_limit", 1420 "Limit on number of equations for the coarse grid", 1421 "PCGAMGSetCoarseEqLim", 1422 pc_gamg->coarse_eq_limit, 1423 &pc_gamg->coarse_eq_limit, 1424 &flag);CHKERRQ(ierr); 1425 /* -pc_gamg_threshold */ 1426 ierr = PetscOptionsReal("-pc_gamg_threshold", 1427 "Relative threshold to use for dropping edges in aggregation graph", 1428 "PCGAMGSetThreshold", 1429 pc_gamg->threshold, 1430 &pc_gamg->threshold, 1431 &flag);CHKERRQ(ierr); 1432 if (flag && pc_gamg->verbose) { 1433 ierr = PetscPrintf(wcomm,"\t[%d]%s threshold set %e\n",0,__FUNCT__,pc_gamg->threshold);CHKERRQ(ierr); 1434 } 1435 1436 ierr = PetscOptionsRealArray("-pc_gamg_eigtarget","Target eigenvalue range as fraction of estimated maximum eigenvalue","PCGAMGSetEigTarget",pc_gamg->eigtarget,&two,NULL);CHKERRQ(ierr); 1437 ierr = 
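/* A typical (hypothetical) option set exercising the switches registered here:

       -pc_type gamg -pc_gamg_type agg -pc_gamg_verbose 1
       -pc_gamg_threshold 0.01 -pc_gamg_repartition true
       -pc_gamg_process_eq_limit 100 -pc_gamg_coarse_eq_limit 1000

   each of which maps onto one of the PCGAMGSet* routines above; the values shown are
   examples only, not the defaults. */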
PetscOptionsInt("-pc_mg_levels", 1438 "Set number of MG levels", 1439 "PCGAMGSetNlevels", 1440 pc_gamg->Nlevels, 1441 &pc_gamg->Nlevels, 1442 &flag);CHKERRQ(ierr); 1443 } 1444 ierr = PetscOptionsTail();CHKERRQ(ierr); 1445 PetscFunctionReturn(0); 1446 } 1447 1448 /* -------------------------------------------------------------------------- */ 1449 /*MC 1450 PCGAMG - Geometric algebraic multigrid (AMG) preconditioning framework. 1451 - This is the entry point to GAMG, registered in pcregis.c 1452 1453 Options Database Keys: 1454 Multigrid options(inherited) 1455 + -pc_mg_cycles <1>: 1 for V cycle, 2 for W-cycle (PCMGSetCycleType) 1456 . -pc_mg_smoothup <1>: Number of post-smoothing steps (PCMGSetNumberSmoothUp) 1457 . -pc_mg_smoothdown <1>: Number of pre-smoothing steps (PCMGSetNumberSmoothDown) 1458 - -pc_mg_type <multiplicative>: (one of) additive multiplicative full kascade 1459 1460 Level: intermediate 1461 1462 Concepts: multigrid 1463 1464 .seealso: PCCreate(), PCSetType(), PCType (for list of available types), PC, PCMGType, 1465 PCMGSetLevels(), PCMGGetLevels(), PCMGSetType(), PCMGSetCycleType(), PCMGSetNumberSmoothDown(), 1466 PCMGSetNumberSmoothUp(), PCMGGetCoarseSolve(), PCMGSetResidual(), PCMGSetInterpolation(), 1467 PCMGSetRestriction(), PCMGGetSmoother(), PCMGGetSmootherUp(), PCMGGetSmootherDown(), 1468 PCMGSetCyclesOnLevel(), PCMGSetRhs(), PCMGSetX(), PCMGSetR() 1469 M*/ 1470 EXTERN_C_BEGIN 1471 #undef __FUNCT__ 1472 #define __FUNCT__ "PCCreate_GAMG" 1473 PetscErrorCode PCCreate_GAMG(PC pc) 1474 { 1475 PetscErrorCode ierr; 1476 PC_GAMG *pc_gamg; 1477 PC_MG *mg; 1478 #if defined PETSC_GAMG_USE_LOG 1479 static long count = 0; 1480 #endif 1481 1482 PetscFunctionBegin; 1483 /* PCGAMG is an inherited class of PCMG. Initialize pc as PCMG */ 1484 ierr = PCSetType(pc, PCMG);CHKERRQ(ierr); /* calls PCCreate_MG() and MGCreate_Private() */ 1485 ierr = PetscObjectChangeTypeName((PetscObject)pc, PCGAMG);CHKERRQ(ierr); 1486 1487 /* create a supporting struct and attach it to pc */ 1488 ierr = PetscNewLog(pc, PC_GAMG, &pc_gamg);CHKERRQ(ierr); 1489 mg = (PC_MG*)pc->data; 1490 mg->galerkin = 2; /* Use Galerkin, but it is computed externally */ 1491 mg->innerctx = pc_gamg; 1492 1493 pc_gamg->setup_count = 0; 1494 /* these should be in subctx but repartitioning needs simple arrays */ 1495 pc_gamg->data_sz = 0; 1496 pc_gamg->data = 0; 1497 1498 /* register AMG type */ 1499 if (!GAMGList) { 1500 ierr = PetscFunctionListAdd(PETSC_COMM_WORLD,&GAMGList,GAMGGEO,"PCCreateGAMG_GEO",(void (*)(void))PCCreateGAMG_GEO);CHKERRQ(ierr); 1501 ierr = PetscFunctionListAdd(PETSC_COMM_WORLD,&GAMGList,GAMGAGG,"PCCreateGAMG_AGG",(void (*)(void))PCCreateGAMG_AGG);CHKERRQ(ierr); 1502 } 1503 1504 /* overwrite the pointers of PCMG by the functions of base class PCGAMG */ 1505 pc->ops->setfromoptions = PCSetFromOptions_GAMG; 1506 pc->ops->setup = PCSetUp_GAMG; 1507 pc->ops->reset = PCReset_GAMG; 1508 pc->ops->destroy = PCDestroy_GAMG; 1509 1510 ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc, 1511 "PCGAMGSetProcEqLim_C", 1512 "PCGAMGSetProcEqLim_GAMG", 1513 PCGAMGSetProcEqLim_GAMG);CHKERRQ(ierr); 1514 ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc, 1515 "PCGAMGSetCoarseEqLim_C", 1516 "PCGAMGSetCoarseEqLim_GAMG", 1517 PCGAMGSetCoarseEqLim_GAMG);CHKERRQ(ierr); 1518 ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc, 1519 "PCGAMGSetRepartitioning_C", 1520 "PCGAMGSetRepartitioning_GAMG", 1521 PCGAMGSetRepartitioning_GAMG);CHKERRQ(ierr); 1522 ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc, 
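                                        /* Each public PCGAMGSetXxx() above goes through
                                           PetscTryMethod() to one of these composed "_C"
                                           functions, so calling a setter on a PC that is
                                           not of type GAMG is simply a no-op. */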
1523 "PCGAMGSetReuseProl_C", 1524 "PCGAMGSetReuseProl_GAMG", 1525 PCGAMGSetReuseProl_GAMG);CHKERRQ(ierr); 1526 ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc, 1527 "PCGAMGSetUseASMAggs_C", 1528 "PCGAMGSetUseASMAggs_GAMG", 1529 PCGAMGSetUseASMAggs_GAMG);CHKERRQ(ierr); 1530 ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc, 1531 "PCGAMGSetThreshold_C", 1532 "PCGAMGSetThreshold_GAMG", 1533 PCGAMGSetThreshold_GAMG);CHKERRQ(ierr); 1534 ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc, 1535 "PCGAMGSetType_C", 1536 "PCGAMGSetType_GAMG", 1537 PCGAMGSetType_GAMG);CHKERRQ(ierr); 1538 pc_gamg->repart = PETSC_FALSE; 1539 pc_gamg->reuse_prol = PETSC_TRUE; 1540 pc_gamg->use_aggs_in_gasm = PETSC_FALSE; 1541 pc_gamg->min_eq_proc = 50; 1542 pc_gamg->coarse_eq_limit = 800; 1543 pc_gamg->threshold = 0.001; 1544 pc_gamg->Nlevels = GAMG_MAXLEVELS; 1545 pc_gamg->verbose = 0; 1546 pc_gamg->emax_id = -1; 1547 pc_gamg->eigtarget[0] = 0.05; 1548 pc_gamg->eigtarget[1] = 1.05; 1549 1550 /* private events */ 1551 #if defined PETSC_GAMG_USE_LOG 1552 if (count++ == 0) { 1553 ierr = PetscLogEventRegister("GAMG: createProl", PC_CLASSID, &petsc_gamg_setup_events[SET1]);CHKERRQ(ierr); 1554 ierr = PetscLogEventRegister(" Graph", PC_CLASSID, &petsc_gamg_setup_events[GRAPH]);CHKERRQ(ierr); 1555 /* PetscLogEventRegister(" G.Mat", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_MAT]); */ 1556 /* PetscLogEventRegister(" G.Filter", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_FILTER]); */ 1557 /* PetscLogEventRegister(" G.Square", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_SQR]); */ 1558 ierr = PetscLogEventRegister(" MIS/Agg", PC_CLASSID, &petsc_gamg_setup_events[SET4]);CHKERRQ(ierr); 1559 ierr = PetscLogEventRegister(" geo: growSupp", PC_CLASSID, &petsc_gamg_setup_events[SET5]);CHKERRQ(ierr); 1560 ierr = PetscLogEventRegister(" geo: triangle", PC_CLASSID, &petsc_gamg_setup_events[SET6]);CHKERRQ(ierr); 1561 ierr = PetscLogEventRegister(" search&set", PC_CLASSID, &petsc_gamg_setup_events[FIND_V]);CHKERRQ(ierr); 1562 ierr = PetscLogEventRegister(" SA: col data", PC_CLASSID, &petsc_gamg_setup_events[SET7]);CHKERRQ(ierr); 1563 ierr = PetscLogEventRegister(" SA: frmProl0", PC_CLASSID, &petsc_gamg_setup_events[SET8]);CHKERRQ(ierr); 1564 ierr = PetscLogEventRegister(" SA: smooth", PC_CLASSID, &petsc_gamg_setup_events[SET9]);CHKERRQ(ierr); 1565 ierr = PetscLogEventRegister("GAMG: partLevel", PC_CLASSID, &petsc_gamg_setup_events[SET2]);CHKERRQ(ierr); 1566 ierr = PetscLogEventRegister(" repartition", PC_CLASSID, &petsc_gamg_setup_events[SET12]);CHKERRQ(ierr); 1567 ierr = PetscLogEventRegister(" Invert-Sort", PC_CLASSID, &petsc_gamg_setup_events[SET13]);CHKERRQ(ierr); 1568 ierr = PetscLogEventRegister(" Move A", PC_CLASSID, &petsc_gamg_setup_events[SET14]);CHKERRQ(ierr); 1569 ierr = PetscLogEventRegister(" Move P", PC_CLASSID, &petsc_gamg_setup_events[SET15]);CHKERRQ(ierr); 1570 1571 /* PetscLogEventRegister(" PL move data", PC_CLASSID, &petsc_gamg_setup_events[SET13]); */ 1572 /* PetscLogEventRegister("GAMG: fix", PC_CLASSID, &petsc_gamg_setup_events[SET10]); */ 1573 /* PetscLogEventRegister("GAMG: set levels", PC_CLASSID, &petsc_gamg_setup_events[SET11]); */ 1574 /* create timer stages */ 1575 #if defined GAMG_STAGES 1576 { 1577 char str[32]; 1578 PetscInt lidx; 1579 sprintf(str,"MG Level %d (finest)",0); 1580 ierr = PetscLogStageRegister(str, &gamg_stages[0]);CHKERRQ(ierr); 1581 for (lidx=1; lidx<9; lidx++) { 1582 sprintf(str,"MG Level %d",lidx); 1583 ierr = PetscLogStageRegister(str, &gamg_stages[lidx]);CHKERRQ(ierr); 
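          /* These stage names appear in the -log_summary breakdown; note that only MG
             levels 0..8 get a dedicated stage here even though GAMG_MAXLEVELS is larger. */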
      }
    }
#endif
  }
#endif
  /* general events */
#if defined PETSC_USE_LOG
  ierr = PetscLogEventRegister("PCGAMGgraph_AGG", 0, &PC_GAMGGgraph_AGG);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("PCGAMGgraph_GEO", PC_CLASSID, &PC_GAMGGgraph_GEO);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("PCGAMGcoarse_AGG", PC_CLASSID, &PC_GAMGCoarsen_AGG);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("PCGAMGcoarse_GEO", PC_CLASSID, &PC_GAMGCoarsen_GEO);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("PCGAMGProl_AGG", PC_CLASSID, &PC_GAMGProlongator_AGG);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("PCGAMGProl_GEO", PC_CLASSID, &PC_GAMGProlongator_GEO);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("PCGAMGPOpt_AGG", PC_CLASSID, &PC_GAMGOptprol_AGG);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("GAMGKKTProl_AGG", PC_CLASSID, &PC_GAMGKKTProl_AGG);CHKERRQ(ierr);
#endif

  /* instantiate derived type */
  ierr = PetscOptionsHead("GAMG options");CHKERRQ(ierr);
  {
    char tname[256] = GAMGAGG;
    ierr = PetscOptionsList("-pc_gamg_type","Type of GAMG method","PCGAMGSetType",GAMGList, tname, tname, sizeof(tname), NULL);CHKERRQ(ierr);
    ierr = PCGAMGSetType(pc, tname);CHKERRQ(ierr);
  }
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END
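
/*
   Minimal usage sketch for PCGAMG (not part of the library source; 'A', 'b' and 'x'
   are assumed to be an assembled Mat and Vecs created elsewhere):

       KSP ksp;
       PC  pc;

       ierr = KSPCreate(PETSC_COMM_WORLD, &ksp);CHKERRQ(ierr);
       ierr = KSPSetOperators(ksp, A, A, SAME_NONZERO_PATTERN);CHKERRQ(ierr);
       ierr = KSPGetPC(ksp, &pc);CHKERRQ(ierr);
       ierr = PCSetType(pc, PCGAMG);CHKERRQ(ierr);        (calls PCCreate_GAMG() above)
       ierr = PCGAMGSetType(pc, GAMGAGG);CHKERRQ(ierr);   (smoothed aggregation)
       ierr = PCGAMGSetThreshold(pc, 0.01);CHKERRQ(ierr);
       ierr = KSPSetFromOptions(ksp);CHKERRQ(ierr);
       ierr = KSPSolve(ksp, b, x);CHKERRQ(ierr);
       ierr = KSPDestroy(&ksp);CHKERRQ(ierr);

   or, entirely from the command line: -ksp_type cg -pc_type gamg -pc_gamg_type agg.
*/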