/*
  This file defines a preconditioner that removes the rows (and matching columns) that
  contain only a diagonal entry, redistributes the remaining rows evenly across all
  processes for load balance, and then applies a KSP to the reduced system.
*/
#include <petsc/private/pcimpl.h>     /*I "petscksp.h" I*/
#include <petscksp.h>

typedef struct {
  KSP         ksp;
  Vec         x,b;
  VecScatter  scatter;
  IS          is;
  PetscInt    dcnt,*drows;    /* these are the local rows that have only a diagonal entry */
  PetscScalar *diag;
  Vec         work;
} PC_Redistribute;

static PetscErrorCode PCView_Redistribute(PC pc,PetscViewer viewer)
{
  PC_Redistribute *red = (PC_Redistribute*)pc->data;
  PetscErrorCode  ierr;
  PetscBool       iascii,isstring;
  PetscInt        ncnt,N;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    ierr = MPIU_Allreduce(&red->dcnt,&ncnt,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)pc));CHKERRMPI(ierr);
    ierr = MatGetSize(pc->pmat,&N,NULL);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer,"    Number of rows eliminated %D, percentage of rows eliminated %g\n",ncnt,100.0*((PetscReal)ncnt)/((PetscReal)N));CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer,"  Redistribute preconditioner: \n");CHKERRQ(ierr);
    ierr = KSPView(red->ksp,viewer);CHKERRQ(ierr);
  } else if (isstring) {
    ierr = PetscViewerStringSPrintf(viewer," Redistribute preconditioner");CHKERRQ(ierr);
    ierr = KSPView(red->ksp,viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
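/*
   A minimal usage sketch (not part of this file's build): it assumes a Mat A and Vecs b,x
   have already been assembled, and shows how a caller selects this preconditioner and then
   views the inner solver configuration via PCView_Redistribute() above. All calls are
   standard public PETSc API.

     KSP ksp;
     PC  pc;

     ierr = KSPCreate(PETSC_COMM_WORLD,&ksp);CHKERRQ(ierr);
     ierr = KSPSetOperators(ksp,A,A);CHKERRQ(ierr);
     ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);        // the outer solve is just one PC application
     ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
     ierr = PCSetType(pc,PCREDISTRIBUTE);CHKERRQ(ierr);
     ierr = KSPSetFromOptions(ksp);CHKERRQ(ierr);            // picks up any -redistribute_* options
     ierr = KSPSolve(ksp,b,x);CHKERRQ(ierr);
     ierr = KSPView(ksp,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
*/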
static PetscErrorCode PCSetUp_Redistribute(PC pc)
{
  PC_Redistribute   *red = (PC_Redistribute*)pc->data;
  PetscErrorCode    ierr;
  MPI_Comm          comm;
  PetscInt          rstart,rend,i,nz,cnt,*rows,ncnt,dcnt,*drows;
  PetscLayout       map,nmap;
  PetscMPIInt       size,tag,n;
  PETSC_UNUSED PetscMPIInt imdex;
  PetscInt          *source = NULL;
  PetscMPIInt       *sizes = NULL,nrecvs;
  PetscInt          j,nsends;
  PetscInt          *owner = NULL,*starts = NULL,count,slen;
  PetscInt          *rvalues,*svalues,recvtotal;
  PetscMPIInt       *onodes1,*olengths1;
  MPI_Request       *send_waits = NULL,*recv_waits = NULL;
  MPI_Status        recv_status,*send_status;
  Vec               tvec,diag;
  Mat               tmat;
  const PetscScalar *d;

  PetscFunctionBegin;
  if (pc->setupcalled) {
    ierr = KSPGetOperators(red->ksp,NULL,&tmat);CHKERRQ(ierr);
    ierr = MatCreateSubMatrix(pc->pmat,red->is,red->is,MAT_REUSE_MATRIX,&tmat);CHKERRQ(ierr);
    ierr = KSPSetOperators(red->ksp,tmat,tmat);CHKERRQ(ierr);
  } else {
    PetscInt NN;

    ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
    ierr = MPI_Comm_size(comm,&size);CHKERRMPI(ierr);
    ierr = PetscObjectGetNewTag((PetscObject)pc,&tag);CHKERRQ(ierr);

    /* count the nondiagonal rows on this process */
    ierr = MatGetOwnershipRange(pc->mat,&rstart,&rend);CHKERRQ(ierr);
    cnt  = 0;
    for (i=rstart; i<rend; i++) {
      ierr = MatGetRow(pc->mat,i,&nz,NULL,NULL);CHKERRQ(ierr);
      if (nz > 1) cnt++;
      ierr = MatRestoreRow(pc->mat,i,&nz,NULL,NULL);CHKERRQ(ierr);
    }
    ierr = PetscMalloc1(cnt,&rows);CHKERRQ(ierr);
    ierr = PetscMalloc1(rend - rstart - cnt,&drows);CHKERRQ(ierr);

    /* list the nondiagonal rows on this process */
    cnt  = 0; dcnt = 0;
    for (i=rstart; i<rend; i++) {
      ierr = MatGetRow(pc->mat,i,&nz,NULL,NULL);CHKERRQ(ierr);
      if (nz > 1) rows[cnt++] = i;
      else drows[dcnt++] = i - rstart;
      ierr = MatRestoreRow(pc->mat,i,&nz,NULL,NULL);CHKERRQ(ierr);
    }

    /* create a PetscLayout describing the current distribution of the nondiagonal rows */
    ierr   = PetscLayoutCreate(comm,&map);CHKERRQ(ierr);
    ierr   = PetscLayoutSetLocalSize(map,cnt);CHKERRQ(ierr);
    ierr   = PetscLayoutSetBlockSize(map,1);CHKERRQ(ierr);
    ierr   = PetscLayoutSetUp(map);CHKERRQ(ierr);
    rstart = map->rstart;
    rend   = map->rend;

    /* create a PetscLayout describing the load-balanced distribution of the nondiagonal rows */
    ierr = PetscLayoutCreate(comm,&nmap);CHKERRQ(ierr);
    ierr = MPIU_Allreduce(&cnt,&ncnt,1,MPIU_INT,MPI_SUM,comm);CHKERRMPI(ierr);
    ierr = PetscLayoutSetSize(nmap,ncnt);CHKERRQ(ierr);
    ierr = PetscLayoutSetBlockSize(nmap,1);CHKERRQ(ierr);
    ierr = PetscLayoutSetUp(nmap);CHKERRQ(ierr);

    ierr = MatGetSize(pc->pmat,&NN,NULL);CHKERRQ(ierr);
    ierr = PetscInfo2(pc,"Number of diagonal rows eliminated %D, percentage eliminated %g\n",NN-ncnt,100.0*((PetscReal)(NN-ncnt))/((PetscReal)(NN)));CHKERRQ(ierr);

    /*
        This communication pattern is taken from VecScatterCreate_PtoS();
        it determines which rows must be moved to which process to load
        balance the nondiagonal rows.
    */
    /* count the number of contributors to each process */
    ierr = PetscMalloc2(size,&sizes,cnt,&owner);CHKERRQ(ierr);
    ierr = PetscArrayzero(sizes,size);CHKERRQ(ierr);
    j      = 0;
    nsends = 0;
    for (i=rstart; i<rend; i++) {
      if (i < nmap->range[j]) j = 0;
      for (; j<size; j++) {
        if (i < nmap->range[j+1]) {
          if (!sizes[j]++) nsends++;
          owner[i-rstart] = j;
          break;
        }
      }
    }
    /* inform the other processes of the number of messages and their maximum length */
    ierr      = PetscGatherNumberOfMessages(comm,NULL,sizes,&nrecvs);CHKERRQ(ierr);
    ierr      = PetscGatherMessageLengths(comm,nsends,nrecvs,sizes,&onodes1,&olengths1);CHKERRQ(ierr);
    ierr      = PetscSortMPIIntWithArray(nrecvs,onodes1,olengths1);CHKERRQ(ierr);
    recvtotal = 0; for (i=0; i<nrecvs; i++) recvtotal += olengths1[i];

    /* post receives: rvalues - the rows this process will own; count - running offset into rvalues */
    ierr  = PetscMalloc3(recvtotal,&rvalues,nrecvs,&source,nrecvs,&recv_waits);CHKERRQ(ierr);
    count = 0;
    for (i=0; i<nrecvs; i++) {
      ierr   = MPI_Irecv((rvalues+count),olengths1[i],MPIU_INT,onodes1[i],tag,comm,recv_waits+i);CHKERRMPI(ierr);
      count += olengths1[i];
    }

    /* do sends:
       1) starts[i] gives the starting index in svalues for the entries going to
          the ith process
    */
    ierr      = PetscMalloc3(cnt,&svalues,nsends,&send_waits,size,&starts);CHKERRQ(ierr);
    starts[0] = 0;
    for (i=1; i<size; i++) starts[i] = starts[i-1] + sizes[i-1];
    for (i=0; i<cnt; i++) svalues[starts[owner[i]]++] = rows[i];
    for (i=0; i<cnt; i++) rows[i] = rows[i] - rstart;
    red->drows = drows;
    red->dcnt  = dcnt;
    ierr = PetscFree(rows);CHKERRQ(ierr);

    starts[0] = 0;
    for (i=1; i<size; i++) starts[i] = starts[i-1] + sizes[i-1];
    count = 0;
    for (i=0; i<size; i++) {
      if (sizes[i]) {
        ierr = MPI_Isend(svalues+starts[i],sizes[i],MPIU_INT,i,tag,comm,send_waits+count++);CHKERRMPI(ierr);
      }
    }

    /* wait on receives; the data lands directly in rvalues, so only the lengths are tallied */
    count = nrecvs;
    slen  = 0;
    while (count) {
      ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRMPI(ierr);
      ierr = MPI_Get_count(&recv_status,MPIU_INT,&n);CHKERRMPI(ierr);
      slen += n;
      count--;
    }
    if (slen != recvtotal) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Total message length %D not as expected %D",slen,recvtotal);

    ierr = ISCreateGeneral(comm,slen,rvalues,PETSC_COPY_VALUES,&red->is);CHKERRQ(ierr);

    /* free up all work space */
    ierr = PetscFree(olengths1);CHKERRQ(ierr);
    ierr = PetscFree(onodes1);CHKERRQ(ierr);
    ierr = PetscFree3(rvalues,source,recv_waits);CHKERRQ(ierr);
    ierr = PetscFree2(sizes,owner);CHKERRQ(ierr);
    if (nsends) {   /* wait on sends */
      ierr = PetscMalloc1(nsends,&send_status);CHKERRQ(ierr);
      ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRMPI(ierr);
      ierr = PetscFree(send_status);CHKERRQ(ierr);
    }
    ierr = PetscFree3(svalues,send_waits,starts);CHKERRQ(ierr);
    ierr = PetscLayoutDestroy(&map);CHKERRQ(ierr);
    ierr = PetscLayoutDestroy(&nmap);CHKERRQ(ierr);

    /* build the redistributed vectors, the scatter between the original and
       redistributed layouts, and the reduced matrix for the inner KSP */
    ierr = VecCreateMPI(comm,slen,PETSC_DETERMINE,&red->b);CHKERRQ(ierr);
    ierr = VecDuplicate(red->b,&red->x);CHKERRQ(ierr);
    ierr = MatCreateVecs(pc->pmat,&tvec,NULL);CHKERRQ(ierr);
    ierr = VecScatterCreate(tvec,red->is,red->b,NULL,&red->scatter);CHKERRQ(ierr);
    ierr = VecDestroy(&tvec);CHKERRQ(ierr);
    ierr = MatCreateSubMatrix(pc->pmat,red->is,red->is,MAT_INITIAL_MATRIX,&tmat);CHKERRQ(ierr);
    ierr = KSPSetOperators(red->ksp,tmat,tmat);CHKERRQ(ierr);
    ierr = MatDestroy(&tmat);CHKERRQ(ierr);
  }

  /* get the inverted diagonal portion of the matrix for the rows with only a diagonal entry */
  ierr = PetscFree(red->diag);CHKERRQ(ierr);
  ierr = PetscMalloc1(red->dcnt,&red->diag);CHKERRQ(ierr);
  ierr = MatCreateVecs(pc->pmat,&diag,NULL);CHKERRQ(ierr);
  ierr = MatGetDiagonal(pc->pmat,diag);CHKERRQ(ierr);
  ierr = VecGetArrayRead(diag,&d);CHKERRQ(ierr);
  for (i=0; i<red->dcnt; i++) red->diag[i] = 1.0/d[red->drows[i]];
  ierr = VecRestoreArrayRead(diag,&d);CHKERRQ(ierr);
  ierr = VecDestroy(&diag);CHKERRQ(ierr);
  ierr = KSPSetUp(red->ksp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
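/*
   A self-contained sketch of the row-classification idiom used above, written as a
   hypothetical helper (the name CountDiagonalOnlyRows is illustrative, not part of
   PETSc). It counts, on this process, how many rows contain only one stored entry,
   exactly the test PCSetUp_Redistribute() applies before allocating drows[].

     static PetscErrorCode CountDiagonalOnlyRows(Mat A,PetscInt *ndiag)
     {
       PetscErrorCode ierr;
       PetscInt       i,rstart,rend,nz;

       PetscFunctionBegin;
       *ndiag = 0;
       ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
       for (i=rstart; i<rend; i++) {
         ierr = MatGetRow(A,i,&nz,NULL,NULL);CHKERRQ(ierr);    // nz = number of stored entries in row i
         if (nz <= 1) (*ndiag)++;                              // a single-entry row is treated as diagonal-only
         ierr = MatRestoreRow(A,i,&nz,NULL,NULL);CHKERRQ(ierr);
       }
       PetscFunctionReturn(0);
     }
*/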
static PetscErrorCode PCApply_Redistribute(PC pc,Vec b,Vec x)
{
  PC_Redistribute   *red = (PC_Redistribute*)pc->data;
  PetscErrorCode    ierr;
  PetscInt          dcnt = red->dcnt,i;
  const PetscInt    *drows = red->drows;
  PetscScalar       *xwork;
  const PetscScalar *bwork,*diag = red->diag;

  PetscFunctionBegin;
  if (!red->work) {
    ierr = VecDuplicate(b,&red->work);CHKERRQ(ierr);
  }
  /* compute the rows of the solution that have diagonal entries only: x = diag(A)^{-1} b on those rows */
  ierr = VecSet(x,0.0);CHKERRQ(ierr);
  ierr = VecGetArray(x,&xwork);CHKERRQ(ierr);
  ierr = VecGetArrayRead(b,&bwork);CHKERRQ(ierr);
  for (i=0; i<dcnt; i++) xwork[drows[i]] = diag[i]*bwork[drows[i]];
  ierr = PetscLogFlops(dcnt);CHKERRQ(ierr);
  ierr = VecRestoreArray(x,&xwork);CHKERRQ(ierr);   /* the array was obtained from x, so it must be restored to x */
  ierr = VecRestoreArrayRead(b,&bwork);CHKERRQ(ierr);
  /* update the right-hand side for the reduced system with the diagonal rows (and corresponding columns) removed */
  ierr = MatMult(pc->pmat,x,red->work);CHKERRQ(ierr);
  ierr = VecAYPX(red->work,-1.0,b);CHKERRQ(ierr);   /* red->work = b - A x */

  /* scatter the reduced right-hand side, solve the redistributed system, and scatter the solution back */
  ierr = VecScatterBegin(red->scatter,red->work,red->b,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatter,red->work,red->b,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = KSPSolve(red->ksp,red->b,red->x);CHKERRQ(ierr);
  ierr = KSPCheckSolve(red->ksp,pc,red->x);CHKERRQ(ierr);
  ierr = VecScatterBegin(red->scatter,red->x,x,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatter,red->x,x,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCDestroy_Redistribute(PC pc)
{
  PC_Redistribute *red = (PC_Redistribute*)pc->data;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = VecScatterDestroy(&red->scatter);CHKERRQ(ierr);
  ierr = ISDestroy(&red->is);CHKERRQ(ierr);
  ierr = VecDestroy(&red->b);CHKERRQ(ierr);
  ierr = VecDestroy(&red->x);CHKERRQ(ierr);
  ierr = KSPDestroy(&red->ksp);CHKERRQ(ierr);
  ierr = VecDestroy(&red->work);CHKERRQ(ierr);
  ierr = PetscFree(red->drows);CHKERRQ(ierr);
  ierr = PetscFree(red->diag);CHKERRQ(ierr);
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCSetFromOptions_Redistribute(PetscOptionItems *PetscOptionsObject,PC pc)
{
  PetscErrorCode  ierr;
  PC_Redistribute *red = (PC_Redistribute*)pc->data;

  PetscFunctionBegin;
  ierr = KSPSetFromOptions(red->ksp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   PCRedistributeGetKSP - Gets the KSP created by the PCREDISTRIBUTE preconditioner

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  innerksp - the inner KSP

   Level: advanced

@*/
PetscErrorCode PCRedistributeGetKSP(PC pc,KSP *innerksp)
{
  PC_Redistribute *red = (PC_Redistribute*)pc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(innerksp,2);
  *innerksp = red->ksp;
  PetscFunctionReturn(0);
}
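/*
   A minimal sketch of configuring the inner solver programmatically through
   PCRedistributeGetKSP(), equivalent to passing -redistribute_ksp_type cg
   -redistribute_pc_type bjacobi on the command line; it assumes pc already has
   type PCREDISTRIBUTE.

     KSP inner;
     PC  innerpc;

     ierr = PCRedistributeGetKSP(pc,&inner);CHKERRQ(ierr);
     ierr = KSPSetType(inner,KSPCG);CHKERRQ(ierr);         // solve the reduced system with CG
     ierr = KSPGetPC(inner,&innerpc);CHKERRQ(ierr);
     ierr = PCSetType(innerpc,PCBJACOBI);CHKERRQ(ierr);    // block Jacobi on the redistributed matrix
*/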
/* -------------------------------------------------------------------------------------*/
/*MC
     PCREDISTRIBUTE - Redistributes a matrix for load balancing, removing the rows that only have a
       diagonal entry, and then applies a KSP to the resulting smaller matrix

     Options for the inner redistribute KSP and PC can be set with -redistribute_ksp_xxx <values> and -redistribute_pc_xxx <values>

     Notes:
     Usually run this with -ksp_type preonly

     If you have used MatZeroRows() to eliminate (for example, Dirichlet) boundary conditions for a symmetric problem then you can use, for example, -ksp_type preonly
     -pc_type redistribute -redistribute_ksp_type cg -redistribute_pc_type bjacobi -redistribute_sub_pc_type icc to take advantage of the symmetry.

     This does NOT call a partitioner to reorder rows to lower communication; the ordering of the rows in the original matrix and the redistributed matrix is the same.

     Developer Notes:
     Should add an option to this preconditioner to use a partitioner to redistribute the rows to lower communication.

     Level: intermediate

.seealso: PCCreate(), PCSetType(), PCType (for list of available types), PCRedistributeGetKSP()
M*/

PETSC_EXTERN PetscErrorCode PCCreate_Redistribute(PC pc)
{
  PetscErrorCode  ierr;
  PC_Redistribute *red;
  const char      *prefix;

  PetscFunctionBegin;
  ierr     = PetscNewLog(pc,&red);CHKERRQ(ierr);
  pc->data = (void*)red;

  pc->ops->apply          = PCApply_Redistribute;
  pc->ops->applytranspose = NULL;
  pc->ops->setup          = PCSetUp_Redistribute;
  pc->ops->destroy        = PCDestroy_Redistribute;
  pc->ops->setfromoptions = PCSetFromOptions_Redistribute;
  pc->ops->view           = PCView_Redistribute;

  ierr = KSPCreate(PetscObjectComm((PetscObject)pc),&red->ksp);CHKERRQ(ierr);
  ierr = KSPSetErrorIfNotConverged(red->ksp,pc->erroriffailure);CHKERRQ(ierr);
  ierr = PetscObjectIncrementTabLevel((PetscObject)red->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)red->ksp);CHKERRQ(ierr);
  ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
  ierr = KSPSetOptionsPrefix(red->ksp,prefix);CHKERRQ(ierr);
  ierr = KSPAppendOptionsPrefix(red->ksp,"redistribute_");CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
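/*
   A typical command line for the symmetric case described in the Notes above;
   the executable name ./app and process count are illustrative:

     mpiexec -n 4 ./app -ksp_type preonly -pc_type redistribute \
        -redistribute_ksp_type cg -redistribute_pc_type bjacobi -redistribute_sub_pc_type icc \
        -redistribute_ksp_monitor
*/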