/*
   Defines a block Jacobi preconditioner.
*/
#include <petsc-private/pcimpl.h>     /*I "petscpc.h" I*/
#include <../src/ksp/pc/impls/bjacobi/bjacobi.h>

static PetscErrorCode PCSetUp_BJacobi_Singleblock(PC,Mat,Mat);
static PetscErrorCode PCSetUp_BJacobi_Multiblock(PC,Mat,Mat);
static PetscErrorCode PCSetUp_BJacobi_Multiproc(PC);

#undef __FUNCT__
#define __FUNCT__ "PCSetUp_BJacobi"
static PetscErrorCode PCSetUp_BJacobi(PC pc)
{
  PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
  Mat            mat  = pc->mat,pmat = pc->pmat;
  PetscErrorCode ierr,(*f)(Mat,Mat*);
  PetscInt       N,M,start,i,sum,end;
  PetscInt       bs,i_start=-1,i_end=-1;
  PetscMPIInt    rank,size;
  const char     *pprefix,*mprefix;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);
  ierr = MatGetLocalSize(pc->pmat,&M,&N);CHKERRQ(ierr);
  ierr = MatGetBlockSize(pc->pmat,&bs);CHKERRQ(ierr);

  if (jac->n > 0 && jac->n < size) {
    ierr = PCSetUp_BJacobi_Multiproc(pc);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  /* --------------------------------------------------------------------------
      Determines the number of blocks assigned to each processor
  -----------------------------------------------------------------------------*/

  /* local block count given */
  if (jac->n_local > 0 && jac->n < 0) {
    ierr = MPI_Allreduce(&jac->n_local,&jac->n,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
    if (jac->l_lens) { /* check that user set these correctly */
      sum = 0;
      for (i=0; i<jac->n_local; i++) {
        if (jac->l_lens[i]/bs*bs != jac->l_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat blocksize doesn't match block Jacobi layout");
        sum += jac->l_lens[i];
      }
      if (sum != M) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local lens set incorrectly");
    } else {
      ierr = PetscMalloc(jac->n_local*sizeof(PetscInt),&jac->l_lens);CHKERRQ(ierr);
      for (i=0; i<jac->n_local; i++) jac->l_lens[i] = bs*((M/bs)/jac->n_local + (((M/bs) % jac->n_local) > i));
    }
  } else if (jac->n > 0 && jac->n_local < 0) { /* global block count given */
    /* global blocks given: determine which ones are local */
    if (jac->g_lens) {
      /* check that g_lens has valid entries */
      for (i=0; i<jac->n; i++) {
        if (!jac->g_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Zero block not allowed");
        if (jac->g_lens[i]/bs*bs != jac->g_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat blocksize doesn't match block Jacobi layout");
      }
      if (size == 1) {
        jac->n_local = jac->n;
        ierr         = PetscMalloc(jac->n_local*sizeof(PetscInt),&jac->l_lens);CHKERRQ(ierr);
        ierr         = PetscMemcpy(jac->l_lens,jac->g_lens,jac->n_local*sizeof(PetscInt));CHKERRQ(ierr);
        /* check that user set these correctly */
        sum = 0;
        for (i=0; i<jac->n_local; i++) sum += jac->l_lens[i];
        if (sum != M) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Global lens set incorrectly");
      } else {
        ierr = MatGetOwnershipRange(pc->pmat,&start,&end);CHKERRQ(ierr);
        /* loop over blocks determining first one owned by me */
        sum = 0;
        for (i=0; i<jac->n+1; i++) {
          if (sum == start) { i_start = i; goto start_1;}
          if (i < jac->n) sum += jac->g_lens[i];
        }
        SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Block sizes used in PCBJacobiSetTotalBlocks()\nare not compatible with parallel matrix layout");
start_1:
        for (i=i_start; i<jac->n+1; i++) {
          if (sum == end) { i_end = i; goto end_1; }
          if (i < jac->n) sum += jac->g_lens[i];
        }
        SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Block sizes used in PCBJacobiSetTotalBlocks()\nare not compatible with parallel matrix layout");
end_1:
        jac->n_local = i_end - i_start;
        ierr         = PetscMalloc(jac->n_local*sizeof(PetscInt),&jac->l_lens);CHKERRQ(ierr);
        ierr         = PetscMemcpy(jac->l_lens,jac->g_lens+i_start,jac->n_local*sizeof(PetscInt));CHKERRQ(ierr);
      }
    } else { /* no global block lengths given, determine them using default layout */
      jac->n_local = jac->n/size + ((jac->n % size) > rank);
      ierr         = PetscMalloc(jac->n_local*sizeof(PetscInt),&jac->l_lens);CHKERRQ(ierr);
      for (i=0; i<jac->n_local; i++) {
        jac->l_lens[i] = ((M/bs)/jac->n_local + (((M/bs) % jac->n_local) > i))*bs;
        if (!jac->l_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Too many blocks given");
      }
    }
  } else if (jac->n < 0 && jac->n_local < 0) { /* no blocks given */
    jac->n         = size;
    jac->n_local   = 1;
    ierr           = PetscMalloc(sizeof(PetscInt),&jac->l_lens);CHKERRQ(ierr);
    jac->l_lens[0] = M;
  } else { /* jac->n > 0 && jac->n_local > 0 */
    if (!jac->l_lens) {
      ierr = PetscMalloc(jac->n_local*sizeof(PetscInt),&jac->l_lens);CHKERRQ(ierr);
      for (i=0; i<jac->n_local; i++) jac->l_lens[i] = bs*((M/bs)/jac->n_local + (((M/bs) % jac->n_local) > i));
    }
  }
  if (jac->n_local < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Number of blocks is less than number of processors");

  /* -------------------------
      Determines mat and pmat
  ---------------------------*/
  ierr = PetscObjectQueryFunction((PetscObject)pc->mat,"MatGetDiagonalBlock_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (!f && size == 1) {
    mat  = pc->mat;
    pmat = pc->pmat;
  } else {
    if (jac->use_true_local) {
      /* use block from true matrix, not preconditioner matrix for local MatMult() */
      ierr = MatGetDiagonalBlock(pc->mat,&mat);CHKERRQ(ierr);
      /* make submatrix have same prefix as entire matrix */
      ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->mat,&mprefix);CHKERRQ(ierr);
      ierr = PetscObjectSetOptionsPrefix((PetscObject)mat,mprefix);CHKERRQ(ierr);
    }
    if (pc->pmat != pc->mat || !jac->use_true_local) {
      ierr = MatGetDiagonalBlock(pc->pmat,&pmat);CHKERRQ(ierr);
      /* make submatrix have same prefix as entire matrix */
      ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
      ierr = PetscObjectSetOptionsPrefix((PetscObject)pmat,pprefix);CHKERRQ(ierr);
    } else pmat = mat;
  }

  /* ------
     Setup code depends on the number of blocks
  */
  if (jac->n_local == 1) {
    ierr = PCSetUp_BJacobi_Singleblock(pc,mat,pmat);CHKERRQ(ierr);
  } else {
    ierr = PCSetUp_BJacobi_Multiblock(pc,mat,pmat);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
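
/*
   Illustrative note (not part of the library): the default local block
   lengths computed above distribute the M/bs matrix blocks as evenly as
   possible across the n_local local blocks.  For example, with M = 10 rows,
   block size bs = 1, and n_local = 3, the formula

       l_lens[i] = bs*((M/bs)/n_local + (((M/bs) % n_local) > i))

   yields l_lens = {4,3,3}, which sums back to M.
*/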

/* Default destroy, if it has never been setup */
#undef __FUNCT__
#define __FUNCT__ "PCDestroy_BJacobi"
static PetscErrorCode PCDestroy_BJacobi(PC pc)
{
  PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscFree(jac->g_lens);CHKERRQ(ierr);
  ierr = PetscFree(jac->l_lens);CHKERRQ(ierr);
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetFromOptions_BJacobi"
static PetscErrorCode PCSetFromOptions_BJacobi(PC pc)
{
  PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
  PetscErrorCode ierr;
  PetscInt       blocks;
  PetscBool      flg;

  PetscFunctionBegin;
  ierr = PetscOptionsHead("Block Jacobi options");CHKERRQ(ierr);
  ierr = PetscOptionsInt("-pc_bjacobi_blocks","Total number of blocks","PCBJacobiSetTotalBlocks",jac->n,&blocks,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PCBJacobiSetTotalBlocks(pc,blocks,NULL);CHKERRQ(ierr);
  }
  flg  = PETSC_FALSE;
  ierr = PetscOptionsBool("-pc_bjacobi_truelocal","Use the true matrix, not the preconditioner matrix, to define the matrix-vector product in the sub-problems","PCBJacobiSetUseTrueLocal",flg,&flg,NULL);CHKERRQ(ierr);
  if (flg) {
    ierr = PCBJacobiSetUseTrueLocal(pc);CHKERRQ(ierr);
  }
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
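
/*
   Illustrative usage (assumed typical command line, not part of the library):
   the options above combine with the "sub_"-prefixed options of the block
   solvers, e.g.

       mpiexec -n 4 ./app -pc_type bjacobi -pc_bjacobi_blocks 8 \
                          -sub_ksp_type preonly -sub_pc_type ilu

   which creates eight blocks (two per process here), each solved with ILU.
*/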

#include <petscdraw.h>
#undef __FUNCT__
#define __FUNCT__ "PCView_BJacobi"
static PetscErrorCode PCView_BJacobi(PC pc,PetscViewer viewer)
{
  PC_BJacobi           *jac   = (PC_BJacobi*)pc->data;
  PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
  PetscErrorCode       ierr;
  PetscMPIInt          rank;
  PetscInt             i;
  PetscBool            iascii,isstring,isdraw;
  PetscViewer          sviewer;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr);
  if (iascii) {
    if (jac->use_true_local) {
      ierr = PetscViewerASCIIPrintf(viewer,"  block Jacobi: using true local matrix, number of blocks = %D\n",jac->n);CHKERRQ(ierr);
    }
    ierr = PetscViewerASCIIPrintf(viewer,"  block Jacobi: number of blocks = %D\n",jac->n);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
    if (jac->same_local_solves) {
      ierr = PetscViewerASCIIPrintf(viewer,"  Local solve is same for all blocks, in the following KSP and PC objects:\n");CHKERRQ(ierr);
      if (jac->ksp && !jac->psubcomm) {
        ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
        if (!rank) {
          ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
          ierr = KSPView(jac->ksp[0],sviewer);CHKERRQ(ierr);
          ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
        }
        ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
      } else if (jac->psubcomm && !jac->psubcomm->color) {
        ierr = PetscViewerASCIIGetStdout(mpjac->psubcomm->comm,&sviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
        ierr = KSPView(*(jac->ksp),sviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      }
    } else {
      PetscInt n_global;
      ierr = MPI_Allreduce(&jac->n_local,&n_global,1,MPIU_INT,MPI_MAX,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"  Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] number of local blocks = %D, first local block number = %D\n",rank,jac->n_local,jac->first_local);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
      for (i=0; i<n_global; i++) {
        ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
        if (i < jac->n_local) {
          ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] local block number %D\n",rank,i);CHKERRQ(ierr);
          ierr = KSPView(jac->ksp[i],sviewer);CHKERRQ(ierr);
          ierr = PetscViewerASCIISynchronizedPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
        }
        ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
      }
      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
    }
  } else if (isstring) {
    ierr = PetscViewerStringSPrintf(viewer," blks=%D",jac->n);CHKERRQ(ierr);
    ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
    if (jac->ksp) {ierr = KSPView(jac->ksp[0],sviewer);CHKERRQ(ierr);}
    ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
  } else if (isdraw) {
    PetscDraw draw;
    char      str[25];
    PetscReal x,y,bottom,h;

    ierr   = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr);
    ierr   = PetscDrawGetCurrentPoint(draw,&x,&y);CHKERRQ(ierr);
    ierr   = PetscSNPrintf(str,25,"Number blocks %D",jac->n);CHKERRQ(ierr);
    ierr   = PetscDrawBoxedString(draw,x,y,PETSC_DRAW_RED,PETSC_DRAW_BLACK,str,NULL,&h);CHKERRQ(ierr);
    bottom = y - h;
    ierr   = PetscDrawPushCurrentPoint(draw,x,bottom);CHKERRQ(ierr);
    /* warning: the communicator on the viewer is different than on the ksp in parallel */
    if (jac->ksp) {ierr = KSPView(jac->ksp[0],viewer);CHKERRQ(ierr);}
    ierr = PetscDrawPopCurrentPoint(draw);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------------------*/

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCBJacobiSetUseTrueLocal_BJacobi"
PetscErrorCode PCBJacobiSetUseTrueLocal_BJacobi(PC pc)
{
  PC_BJacobi *jac;

  PetscFunctionBegin;
  jac                 = (PC_BJacobi*)pc->data;
  jac->use_true_local = PETSC_TRUE;
  PetscFunctionReturn(0);
}
EXTERN_C_END

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCBJacobiGetSubKSP_BJacobi"
PetscErrorCode PCBJacobiGetSubKSP_BJacobi(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
{
  PC_BJacobi *jac = (PC_BJacobi*)pc->data;

  PetscFunctionBegin;
  if (!pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Must call KSPSetUp() or PCSetUp() first");

  if (n_local) *n_local = jac->n_local;
  if (first_local) *first_local = jac->first_local;
  *ksp = jac->ksp;
  jac->same_local_solves = PETSC_FALSE;   /* Assume that local solves are now different;
                                             not necessarily true, though!  This flag is
                                             used only for PCView_BJacobi() */
  PetscFunctionReturn(0);
}
EXTERN_C_END

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCBJacobiSetTotalBlocks_BJacobi"
PetscErrorCode PCBJacobiSetTotalBlocks_BJacobi(PC pc,PetscInt blocks,PetscInt *lens)
{
  PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (pc->setupcalled > 0 && jac->n!=blocks) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ORDER,"Cannot alter number of blocks after PCSetUp()/KSPSetUp() has been called");
  jac->n = blocks;
  if (!lens) jac->g_lens = 0;
  else {
    ierr = PetscMalloc(blocks*sizeof(PetscInt),&jac->g_lens);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory(pc,blocks*sizeof(PetscInt));CHKERRQ(ierr);
    ierr = PetscMemcpy(jac->g_lens,lens,blocks*sizeof(PetscInt));CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
EXTERN_C_END

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCBJacobiGetTotalBlocks_BJacobi"
PetscErrorCode PCBJacobiGetTotalBlocks_BJacobi(PC pc,PetscInt *blocks,const PetscInt *lens[])
{
  PC_BJacobi *jac = (PC_BJacobi*)pc->data;

  PetscFunctionBegin;
  *blocks = jac->n;
  if (lens) *lens = jac->g_lens;
  PetscFunctionReturn(0);
}
EXTERN_C_END

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCBJacobiSetLocalBlocks_BJacobi"
PetscErrorCode PCBJacobiSetLocalBlocks_BJacobi(PC pc,PetscInt blocks,const PetscInt lens[])
{
  PC_BJacobi     *jac;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  jac = (PC_BJacobi*)pc->data;

  jac->n_local = blocks;
  if (!lens) jac->l_lens = 0;
  else {
    ierr = PetscMalloc(blocks*sizeof(PetscInt),&jac->l_lens);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory(pc,blocks*sizeof(PetscInt));CHKERRQ(ierr);
    ierr = PetscMemcpy(jac->l_lens,lens,blocks*sizeof(PetscInt));CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
EXTERN_C_END

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCBJacobiGetLocalBlocks_BJacobi"
PetscErrorCode PCBJacobiGetLocalBlocks_BJacobi(PC pc,PetscInt *blocks,const PetscInt *lens[])
{
  PC_BJacobi *jac = (PC_BJacobi*)pc->data;

  PetscFunctionBegin;
  *blocks = jac->n_local;
  if (lens) *lens = jac->l_lens;
  PetscFunctionReturn(0);
}
EXTERN_C_END

/* -------------------------------------------------------------------------------------*/

#undef __FUNCT__
#define __FUNCT__ "PCBJacobiSetUseTrueLocal"
/*@
   PCBJacobiSetUseTrueLocal - Sets a flag to indicate that the block
   problem is associated with the linear system matrix instead of the
   default (where it is associated with the preconditioning matrix).
   That is, if the local system is solved iteratively then it iterates
   on the block from the matrix using the block from the preconditioner
   as the preconditioner for the local block.

   Logically Collective on PC

   Input Parameter:
.  pc - the preconditioner context

   Options Database Key:
.  -pc_bjacobi_truelocal - Activates PCBJacobiSetUseTrueLocal()

   Notes:
   For the common case in which the preconditioning and linear
   system matrices are identical, this routine is unnecessary.

   Level: intermediate

.keywords:  block, Jacobi, set, true, local, flag

.seealso: PCSetOperators(), PCBJacobiSetLocalBlocks()
@*/
PetscErrorCode PCBJacobiSetUseTrueLocal(PC pc)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCBJacobiSetUseTrueLocal_C",(PC),(pc));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
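
/*
   Illustrative sketch (assumed user code, not part of the library): the
   true-local option matters only when KSPSetOperators() was given distinct
   Amat and Pmat; the block solves then iterate with the block of Amat while
   preconditioning with the block of Pmat.

       ierr = KSPSetOperators(ksp,Amat,Pmat,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
       ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
       ierr = PCSetType(pc,PCBJACOBI);CHKERRQ(ierr);
       ierr = PCBJacobiSetUseTrueLocal(pc);CHKERRQ(ierr);
       ierr = KSPSolve(ksp,b,x);CHKERRQ(ierr);
*/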

#undef __FUNCT__
#define __FUNCT__ "PCBJacobiGetSubKSP"
/*@C
   PCBJacobiGetSubKSP - Gets the local KSP contexts for all blocks on
   this processor.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  n_local - the number of blocks on this processor, or NULL
.  first_local - the global number of the first block on this processor, or NULL
-  ksp - the array of KSP contexts

   Notes:
   After PCBJacobiGetSubKSP() the array of KSP contexts is not to be freed.

   Currently for some matrix implementations only 1 block per processor
   is supported.

   You must call KSPSetUp() or PCSetUp() before calling PCBJacobiGetSubKSP().

   Fortran Usage: You must pass in a KSP array that is large enough to contain all the local KSPs.
      You can call PCBJacobiGetSubKSP(pc,nlocal,firstlocal,NULL_OBJECT,ierr) to determine how large the
      KSP array must be.

   Level: advanced

.keywords:  block, Jacobi, get, sub, KSP, context

.seealso: PCBJacobiSetTotalBlocks(), PCBJacobiSetLocalBlocks()
@*/
PetscErrorCode PCBJacobiGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscUseMethod(pc,"PCBJacobiGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
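
/*
   Illustrative sketch (assumed user code, not part of the library): after
   KSPSetUp() one can retrieve the block solvers and configure each one
   individually, e.g.

       KSP      *subksp;
       PC       subpc;
       PetscInt i,nlocal,first;

       ierr = KSPSetUp(ksp);CHKERRQ(ierr);
       ierr = PCBJacobiGetSubKSP(pc,&nlocal,&first,&subksp);CHKERRQ(ierr);
       for (i=0; i<nlocal; i++) {
         ierr = KSPGetPC(subksp[i],&subpc);CHKERRQ(ierr);
         ierr = PCSetType(subpc,PCILU);CHKERRQ(ierr);  /* ILU on every local block */
       }
*/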

#undef __FUNCT__
#define __FUNCT__ "PCBJacobiSetTotalBlocks"
/*@
   PCBJacobiSetTotalBlocks - Sets the global number of blocks for the block
   Jacobi preconditioner.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  blocks - the number of blocks
-  lens - [optional] integer array containing the size of each block

   Options Database Key:
.  -pc_bjacobi_blocks <blocks> - Sets the number of global blocks

   Notes:
   Currently only a limited number of blocking configurations are supported.
   All processors sharing the PC must call this routine with the same data.

   Level: intermediate

.keywords:  set, number, Jacobi, global, total, blocks

.seealso: PCBJacobiSetUseTrueLocal(), PCBJacobiSetLocalBlocks()
@*/
PetscErrorCode PCBJacobiSetTotalBlocks(PC pc,PetscInt blocks,const PetscInt lens[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (blocks <= 0) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Must have positive blocks");
  ierr = PetscTryMethod(pc,"PCBJacobiSetTotalBlocks_C",(PC,PetscInt,const PetscInt[]),(pc,blocks,lens));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
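
/*
   Illustrative sketch (assumed user code, not part of the library): an
   explicit lens array allows unequal block sizes; the entries must sum to
   the global number of rows.  For a 100-row matrix split into three blocks
   of 50, 30, and 20 rows:

       PetscInt lens[3] = {50,30,20};
       ierr = PCBJacobiSetTotalBlocks(pc,3,lens);CHKERRQ(ierr);
*/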

#undef __FUNCT__
#define __FUNCT__ "PCBJacobiGetTotalBlocks"
/*@C
   PCBJacobiGetTotalBlocks - Gets the global number of blocks for the block
   Jacobi preconditioner.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  blocks - the number of blocks
-  lens - integer array containing the size of each block

   Level: intermediate

.keywords:  get, number, Jacobi, global, total, blocks

.seealso: PCBJacobiSetUseTrueLocal(), PCBJacobiGetLocalBlocks()
@*/
PetscErrorCode PCBJacobiGetTotalBlocks(PC pc,PetscInt *blocks,const PetscInt *lens[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidIntPointer(blocks,2);
  ierr = PetscUseMethod(pc,"PCBJacobiGetTotalBlocks_C",(PC,PetscInt*,const PetscInt *[]),(pc,blocks,lens));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCBJacobiSetLocalBlocks"
/*@
   PCBJacobiSetLocalBlocks - Sets the local number of blocks for the block
   Jacobi preconditioner.

   Not Collective

   Input Parameters:
+  pc - the preconditioner context
.  blocks - the number of blocks
-  lens - [optional] integer array containing size of each block

   Note:
   Currently only a limited number of blocking configurations are supported.

   Level: intermediate

.keywords: PC, set, number, Jacobi, local, blocks

.seealso: PCBJacobiSetUseTrueLocal(), PCBJacobiSetTotalBlocks()
@*/
PetscErrorCode PCBJacobiSetLocalBlocks(PC pc,PetscInt blocks,const PetscInt lens[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (blocks < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must have nonnegative blocks");
  ierr = PetscTryMethod(pc,"PCBJacobiSetLocalBlocks_C",(PC,PetscInt,const PetscInt []),(pc,blocks,lens));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCBJacobiGetLocalBlocks"
/*@C
   PCBJacobiGetLocalBlocks - Gets the local number of blocks for the block
   Jacobi preconditioner.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  blocks - the number of blocks
-  lens - [optional] integer array containing size of each block

   Note:
   Currently only a limited number of blocking configurations are supported.

   Level: intermediate

.keywords: PC, get, number, Jacobi, local, blocks

.seealso: PCBJacobiSetUseTrueLocal(), PCBJacobiGetTotalBlocks()
@*/
PetscErrorCode PCBJacobiGetLocalBlocks(PC pc,PetscInt *blocks,const PetscInt *lens[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidIntPointer(blocks,2);
  ierr = PetscUseMethod(pc,"PCBJacobiGetLocalBlocks_C",(PC,PetscInt*,const PetscInt *[]),(pc,blocks,lens));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* -----------------------------------------------------------------------------------*/

/*MC
   PCBJACOBI - Use block Jacobi preconditioning, each block is (approximately) solved with
      its own KSP object.

   Options Database Keys:
.  -pc_bjacobi_truelocal - Activates PCBJacobiSetUseTrueLocal()

   Notes: Each processor can have one or more blocks, but a block cannot be shared by more
     than one processor. Defaults to one block per processor.

     To set options on the solvers for all blocks, prepend -sub_ to all the KSP and PC
     options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly

     To set options on the solvers separately for each block, call PCBJacobiGetSubKSP()
     and set the options directly on the resulting KSP object (you can access its PC with
     KSPGetPC()).

   Level: beginner

   Concepts: block Jacobi

   Developer Notes: This preconditioner does not currently work with CUDA/CUSP for a couple of reasons.
       (1) It creates seq vectors as work vectors that should be cusp
       (2) The use of VecPlaceArray() is not handled properly by CUSP (that is it will not know where
           the ownership of the vector is so may use wrong values), and even if it did know the ownership
           it may induce extra copies up and down. Satish suggests a VecTransplantArray() to handle two
           vectors sharing the same pointer and handling the CUSP side as well, instead of VecGetArray()/VecPlaceArray().

.seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
           PCASM, PCBJacobiSetUseTrueLocal(), PCBJacobiGetSubKSP(), PCBJacobiSetTotalBlocks(),
           PCBJacobiSetLocalBlocks(), PCSetModifySubmatrices()
M*/
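
/*
   Illustrative sketch (assumed user code, not part of the library): minimal
   programmatic selection of this preconditioner inside a standard KSP solve,
   where A, b, and x are an already assembled Mat and Vecs.

       KSP ksp;
       PC  pc;

       ierr = KSPCreate(PETSC_COMM_WORLD,&ksp);CHKERRQ(ierr);
       ierr = KSPSetOperators(ksp,A,A,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
       ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
       ierr = PCSetType(pc,PCBJACOBI);CHKERRQ(ierr);
       ierr = KSPSetFromOptions(ksp);CHKERRQ(ierr);
       ierr = KSPSolve(ksp,b,x);CHKERRQ(ierr);
       ierr = KSPDestroy(&ksp);CHKERRQ(ierr);
*/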

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCCreate_BJacobi"
PetscErrorCode PCCreate_BJacobi(PC pc)
{
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  PC_BJacobi     *jac;

  PetscFunctionBegin;
  ierr = PetscNewLog(pc,PC_BJacobi,&jac);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);

  pc->ops->apply           = 0;
  pc->ops->applytranspose  = 0;
  pc->ops->setup           = PCSetUp_BJacobi;
  pc->ops->destroy         = PCDestroy_BJacobi;
  pc->ops->setfromoptions  = PCSetFromOptions_BJacobi;
  pc->ops->view            = PCView_BJacobi;
  pc->ops->applyrichardson = 0;

  pc->data               = (void*)jac;
  jac->n                 = -1;
  jac->n_local           = -1;
  jac->first_local       = rank;
  jac->ksp               = 0;
  jac->use_true_local    = PETSC_FALSE;
  jac->same_local_solves = PETSC_TRUE;
  jac->g_lens            = 0;
  jac->l_lens            = 0;
  jac->psubcomm          = 0;

  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiSetUseTrueLocal_C","PCBJacobiSetUseTrueLocal_BJacobi",PCBJacobiSetUseTrueLocal_BJacobi);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiGetSubKSP_C","PCBJacobiGetSubKSP_BJacobi",PCBJacobiGetSubKSP_BJacobi);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiSetTotalBlocks_C","PCBJacobiSetTotalBlocks_BJacobi",PCBJacobiSetTotalBlocks_BJacobi);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiGetTotalBlocks_C","PCBJacobiGetTotalBlocks_BJacobi",PCBJacobiGetTotalBlocks_BJacobi);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiSetLocalBlocks_C","PCBJacobiSetLocalBlocks_BJacobi",PCBJacobiSetLocalBlocks_BJacobi);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiGetLocalBlocks_C","PCBJacobiGetLocalBlocks_BJacobi",PCBJacobiGetLocalBlocks_BJacobi);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END
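
/*
   Illustrative note (not part of the library): the PetscObjectComposeFunction()
   calls above are how the type-independent API dispatches to this
   implementation.  A public routine such as PCBJacobiSetTotalBlocks() never
   touches PC_BJacobi directly; it calls

       ierr = PetscTryMethod(pc,"PCBJacobiSetTotalBlocks_C",(PC,PetscInt,const PetscInt[]),(pc,blocks,lens));CHKERRQ(ierr);

   which looks up the function composed under that name and invokes it, so the
   call is silently a no-op when the PC is of a different type (PetscUseMethod()
   instead raises an error when the method is missing).
*/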

/* --------------------------------------------------------------------------------------------*/
/*
        These are for a single block per processor; works for AIJ, BAIJ; Seq and MPI
*/
#undef __FUNCT__
#define __FUNCT__ "PCReset_BJacobi_Singleblock"
PetscErrorCode PCReset_BJacobi_Singleblock(PC pc)
{
  PC_BJacobi             *jac  = (PC_BJacobi*)pc->data;
  PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  ierr = KSPReset(jac->ksp[0]);CHKERRQ(ierr);
  ierr = VecDestroy(&bjac->x);CHKERRQ(ierr);
  ierr = VecDestroy(&bjac->y);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCDestroy_BJacobi_Singleblock"
PetscErrorCode PCDestroy_BJacobi_Singleblock(PC pc)
{
  PC_BJacobi             *jac  = (PC_BJacobi*)pc->data;
  PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  ierr = PCReset_BJacobi_Singleblock(pc);CHKERRQ(ierr);
  ierr = KSPDestroy(&jac->ksp[0]);CHKERRQ(ierr);
  ierr = PetscFree(jac->ksp);CHKERRQ(ierr);
  ierr = PetscFree(jac->l_lens);CHKERRQ(ierr);
  ierr = PetscFree(jac->g_lens);CHKERRQ(ierr);
  ierr = PetscFree(bjac);CHKERRQ(ierr);
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetUpOnBlocks_BJacobi_Singleblock"
PetscErrorCode PCSetUpOnBlocks_BJacobi_Singleblock(PC pc)
{
  PetscErrorCode ierr;
  PC_BJacobi     *jac = (PC_BJacobi*)pc->data;

  PetscFunctionBegin;
  ierr = KSPSetUp(jac->ksp[0]);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApply_BJacobi_Singleblock"
PetscErrorCode PCApply_BJacobi_Singleblock(PC pc,Vec x,Vec y)
{
  PetscErrorCode         ierr;
  PC_BJacobi             *jac  = (PC_BJacobi*)pc->data;
  PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
  PetscScalar            *x_array,*y_array;

  PetscFunctionBegin;
  /*
      The VecPlaceArray() calls avoid having to copy the x and y
    vectors into the bjac->x and bjac->y vectors. The reason for
    the bjac->x vector is that we need a sequential vector
    for the sequential solve.
  */
  ierr = VecGetArray(x,&x_array);CHKERRQ(ierr);
  ierr = VecGetArray(y,&y_array);CHKERRQ(ierr);
  ierr = VecPlaceArray(bjac->x,x_array);CHKERRQ(ierr);
  ierr = VecPlaceArray(bjac->y,y_array);CHKERRQ(ierr);
  ierr = KSPSolve(jac->ksp[0],bjac->x,bjac->y);CHKERRQ(ierr);
  ierr = VecResetArray(bjac->x);CHKERRQ(ierr);
  ierr = VecResetArray(bjac->y);CHKERRQ(ierr);
  ierr = VecRestoreArray(x,&x_array);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&y_array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
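
/*
   Illustrative sketch (not part of the library; m and v are hypothetical
   names for a local size and an existing Vec): the zero-copy aliasing
   pattern used by the apply routines above.  A sequential work vector is
   created with a NULL array and later pointed at a caller-owned buffer;
   every VecPlaceArray() must be paired with a VecResetArray() before the
   buffer is returned.

       Vec         work;
       PetscScalar *array;

       ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,m,NULL,&work);CHKERRQ(ierr);
       ierr = VecGetArray(v,&array);CHKERRQ(ierr);
       ierr = VecPlaceArray(work,array);CHKERRQ(ierr);
       ... use work as an alias for v's local part ...
       ierr = VecResetArray(work);CHKERRQ(ierr);
       ierr = VecRestoreArray(v,&array);CHKERRQ(ierr);
       ierr = VecDestroy(&work);CHKERRQ(ierr);
*/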

#undef __FUNCT__
#define __FUNCT__ "PCApplySymmetricLeft_BJacobi_Singleblock"
PetscErrorCode PCApplySymmetricLeft_BJacobi_Singleblock(PC pc,Vec x,Vec y)
{
  PetscErrorCode         ierr;
  PC_BJacobi             *jac  = (PC_BJacobi*)pc->data;
  PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
  PetscScalar            *x_array,*y_array;
  PC                     subpc;

  PetscFunctionBegin;
  /*
      The VecPlaceArray() calls avoid having to copy the x and y
    vectors into the bjac->x and bjac->y vectors. The reason for
    the bjac->x vector is that we need a sequential vector
    for the sequential solve.
  */
  ierr = VecGetArray(x,&x_array);CHKERRQ(ierr);
  ierr = VecGetArray(y,&y_array);CHKERRQ(ierr);
  ierr = VecPlaceArray(bjac->x,x_array);CHKERRQ(ierr);
  ierr = VecPlaceArray(bjac->y,y_array);CHKERRQ(ierr);
  /* apply the symmetric left portion of the inner PC operator */
  /* note this bypasses the inner KSP and its options completely */
  ierr = KSPGetPC(jac->ksp[0],&subpc);CHKERRQ(ierr);
  ierr = PCApplySymmetricLeft(subpc,bjac->x,bjac->y);CHKERRQ(ierr);
  ierr = VecResetArray(bjac->x);CHKERRQ(ierr);
  ierr = VecResetArray(bjac->y);CHKERRQ(ierr);
  ierr = VecRestoreArray(x,&x_array);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&y_array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApplySymmetricRight_BJacobi_Singleblock"
PetscErrorCode PCApplySymmetricRight_BJacobi_Singleblock(PC pc,Vec x,Vec y)
{
  PetscErrorCode         ierr;
  PC_BJacobi             *jac  = (PC_BJacobi*)pc->data;
  PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
  PetscScalar            *x_array,*y_array;
  PC                     subpc;

  PetscFunctionBegin;
  /*
      The VecPlaceArray() calls avoid having to copy the x and y
    vectors into the bjac->x and bjac->y vectors. The reason for
    the bjac->x vector is that we need a sequential vector
    for the sequential solve.
  */
  ierr = VecGetArray(x,&x_array);CHKERRQ(ierr);
  ierr = VecGetArray(y,&y_array);CHKERRQ(ierr);
  ierr = VecPlaceArray(bjac->x,x_array);CHKERRQ(ierr);
  ierr = VecPlaceArray(bjac->y,y_array);CHKERRQ(ierr);

  /* apply the symmetric right portion of the inner PC operator */
  /* note this bypasses the inner KSP and its options completely */

  ierr = KSPGetPC(jac->ksp[0],&subpc);CHKERRQ(ierr);
  ierr = PCApplySymmetricRight(subpc,bjac->x,bjac->y);CHKERRQ(ierr);

  /* undo the aliasing before the arrays are returned */
  ierr = VecResetArray(bjac->x);CHKERRQ(ierr);
  ierr = VecResetArray(bjac->y);CHKERRQ(ierr);
  ierr = VecRestoreArray(x,&x_array);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&y_array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApplyTranspose_BJacobi_Singleblock"
PetscErrorCode PCApplyTranspose_BJacobi_Singleblock(PC pc,Vec x,Vec y)
{
  PetscErrorCode         ierr;
  PC_BJacobi             *jac  = (PC_BJacobi*)pc->data;
  PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
  PetscScalar            *x_array,*y_array;

  PetscFunctionBegin;
  /*
      The VecPlaceArray() calls avoid having to copy the x and y
    vectors into the bjac->x and bjac->y vectors. The reason for
    the bjac->x vector is that we need a sequential vector
    for the sequential solve.
  */
  ierr = VecGetArray(x,&x_array);CHKERRQ(ierr);
  ierr = VecGetArray(y,&y_array);CHKERRQ(ierr);
  ierr = VecPlaceArray(bjac->x,x_array);CHKERRQ(ierr);
  ierr = VecPlaceArray(bjac->y,y_array);CHKERRQ(ierr);
  ierr = KSPSolveTranspose(jac->ksp[0],bjac->x,bjac->y);CHKERRQ(ierr);
  ierr = VecResetArray(bjac->x);CHKERRQ(ierr);
  ierr = VecResetArray(bjac->y);CHKERRQ(ierr);
  ierr = VecRestoreArray(x,&x_array);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&y_array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetUp_BJacobi_Singleblock"
static PetscErrorCode PCSetUp_BJacobi_Singleblock(PC pc,Mat mat,Mat pmat)
{
  PC_BJacobi             *jac = (PC_BJacobi*)pc->data;
  PetscErrorCode         ierr;
  PetscInt               m;
  KSP                    ksp;
  PC_BJacobi_Singleblock *bjac;
  PetscBool              wasSetup = PETSC_TRUE;

  PetscFunctionBegin;
  if (!pc->setupcalled) {
    const char *prefix;

    if (!jac->ksp) {
      wasSetup = PETSC_FALSE;

      ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
      ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
      ierr = PetscLogObjectParent(pc,ksp);CHKERRQ(ierr);
      ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
      ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
      ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
      ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);

      pc->ops->reset               = PCReset_BJacobi_Singleblock;
      pc->ops->destroy             = PCDestroy_BJacobi_Singleblock;
      pc->ops->apply               = PCApply_BJacobi_Singleblock;
      pc->ops->applysymmetricleft  = PCApplySymmetricLeft_BJacobi_Singleblock;
      pc->ops->applysymmetricright = PCApplySymmetricRight_BJacobi_Singleblock;
      pc->ops->applytranspose      = PCApplyTranspose_BJacobi_Singleblock;
      pc->ops->setuponblocks       = PCSetUpOnBlocks_BJacobi_Singleblock;

      ierr        = PetscMalloc(sizeof(KSP),&jac->ksp);CHKERRQ(ierr);
      jac->ksp[0] = ksp;

      ierr      = PetscNewLog(pc,PC_BJacobi_Singleblock,&bjac);CHKERRQ(ierr);
      jac->data = (void*)bjac;
    } else {
      ksp  = jac->ksp[0];
      bjac = (PC_BJacobi_Singleblock*)jac->data;
    }

    /*
      The reason we need to generate these vectors is to serve
      as the right-hand side and solution vector for the solve on the
      block. We do not need to allocate space for the vectors since
      that is provided via VecPlaceArray() just before the call to
      KSPSolve() on the block.
    */
    ierr = MatGetSize(pmat,&m,&m);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,m,NULL,&bjac->x);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,m,NULL,&bjac->y);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(pc,bjac->x);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(pc,bjac->y);CHKERRQ(ierr);
  } else {
    ksp  = jac->ksp[0];
    bjac = (PC_BJacobi_Singleblock*)jac->data;
  }
  if (jac->use_true_local) {
    ierr = KSPSetOperators(ksp,mat,pmat,pc->flag);CHKERRQ(ierr);
  } else {
    ierr = KSPSetOperators(ksp,pmat,pmat,pc->flag);CHKERRQ(ierr);
  }
  if (!wasSetup && pc->setfromoptionscalled) {
    ierr = KSPSetFromOptions(ksp);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/* ---------------------------------------------------------------------------------------------*/
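
/*
   Illustrative note (assumed user code, not part of the library): the
   multiblock code path below is reached whenever a process owns more than
   one block, for example after

       ierr = PCBJacobiSetLocalBlocks(pc,4,NULL);CHKERRQ(ierr);

   (four equal-sized blocks on this process) or, equivalently at runtime,
   -pc_bjacobi_blocks <n> with n larger than the number of MPI processes.
*/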
#undef __FUNCT__
#define __FUNCT__ "PCReset_BJacobi_Multiblock"
PetscErrorCode PCReset_BJacobi_Multiblock(PC pc)
{
  PC_BJacobi            *jac  = (PC_BJacobi*)pc->data;
  PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
  PetscErrorCode        ierr;
  PetscInt              i;

  PetscFunctionBegin;
  if (bjac && bjac->pmat) {
    ierr = MatDestroyMatrices(jac->n_local,&bjac->pmat);CHKERRQ(ierr);
    if (jac->use_true_local) {
      ierr = MatDestroyMatrices(jac->n_local,&bjac->mat);CHKERRQ(ierr);
    }
  }

  for (i=0; i<jac->n_local; i++) {
    ierr = KSPReset(jac->ksp[i]);CHKERRQ(ierr);
    if (bjac && bjac->x) {
      ierr = VecDestroy(&bjac->x[i]);CHKERRQ(ierr);
      ierr = VecDestroy(&bjac->y[i]);CHKERRQ(ierr);
      ierr = ISDestroy(&bjac->is[i]);CHKERRQ(ierr);
    }
  }
  ierr = PetscFree(jac->l_lens);CHKERRQ(ierr);
  ierr = PetscFree(jac->g_lens);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCDestroy_BJacobi_Multiblock"
PetscErrorCode PCDestroy_BJacobi_Multiblock(PC pc)
{
  PC_BJacobi            *jac  = (PC_BJacobi*)pc->data;
  PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
  PetscErrorCode        ierr;
  PetscInt              i;

  PetscFunctionBegin;
  ierr = PCReset_BJacobi_Multiblock(pc);CHKERRQ(ierr);
  if (bjac) {
    ierr = PetscFree2(bjac->x,bjac->y);CHKERRQ(ierr);
    ierr = PetscFree(bjac->starts);CHKERRQ(ierr);
    ierr = PetscFree(bjac->is);CHKERRQ(ierr);
  }
  ierr = PetscFree(jac->data);CHKERRQ(ierr);
  for (i=0; i<jac->n_local; i++) {
    ierr = KSPDestroy(&jac->ksp[i]);CHKERRQ(ierr);
  }
  ierr = PetscFree(jac->ksp);CHKERRQ(ierr);
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetUpOnBlocks_BJacobi_Multiblock"
PetscErrorCode PCSetUpOnBlocks_BJacobi_Multiblock(PC pc)
{
  PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local = jac->n_local;

  PetscFunctionBegin;
  for (i=0; i<n_local; i++) {
    ierr = KSPSetUp(jac->ksp[i]);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*
      Preconditioner for block Jacobi
*/
#undef __FUNCT__
#define __FUNCT__ "PCApply_BJacobi_Multiblock"
PetscErrorCode PCApply_BJacobi_Multiblock(PC pc,Vec x,Vec y)
{
  PC_BJacobi            *jac = (PC_BJacobi*)pc->data;
  PetscErrorCode        ierr;
  PetscInt              i,n_local = jac->n_local;
  PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
  PetscScalar           *xin,*yin;

  PetscFunctionBegin;
  ierr = VecGetArray(x,&xin);CHKERRQ(ierr);
  ierr = VecGetArray(y,&yin);CHKERRQ(ierr);
  for (i=0; i<n_local; i++) {
    /*
       To avoid copying the subvector from x into a workspace we instead
       make the workspace vector array point to the subpart of the array of
       the global vector.
    */
    ierr = VecPlaceArray(bjac->x[i],xin+bjac->starts[i]);CHKERRQ(ierr);
    ierr = VecPlaceArray(bjac->y[i],yin+bjac->starts[i]);CHKERRQ(ierr);

    ierr = PetscLogEventBegin(PC_ApplyOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr);
    ierr = KSPSolve(jac->ksp[i],bjac->x[i],bjac->y[i]);CHKERRQ(ierr);
    ierr = PetscLogEventEnd(PC_ApplyOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr);

    ierr = VecResetArray(bjac->x[i]);CHKERRQ(ierr);
    ierr = VecResetArray(bjac->y[i]);CHKERRQ(ierr);
  }
  ierr = VecRestoreArray(x,&xin);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&yin);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*
      Transpose preconditioner for block Jacobi
*/
#undef __FUNCT__
#define __FUNCT__ "PCApplyTranspose_BJacobi_Multiblock"
PetscErrorCode PCApplyTranspose_BJacobi_Multiblock(PC pc,Vec x,Vec y)
{
  PC_BJacobi            *jac = (PC_BJacobi*)pc->data;
  PetscErrorCode        ierr;
  PetscInt              i,n_local = jac->n_local;
  PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
  PetscScalar           *xin,*yin;

  PetscFunctionBegin;
  ierr = VecGetArray(x,&xin);CHKERRQ(ierr);
  ierr = VecGetArray(y,&yin);CHKERRQ(ierr);
  for (i=0; i<n_local; i++) {
    /*
       To avoid copying the subvector from x into a workspace we instead
       make the workspace vector array point to the subpart of the array of
       the global vector.
    */
    ierr = VecPlaceArray(bjac->x[i],xin+bjac->starts[i]);CHKERRQ(ierr);
    ierr = VecPlaceArray(bjac->y[i],yin+bjac->starts[i]);CHKERRQ(ierr);

    ierr = PetscLogEventBegin(PC_ApplyTransposeOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr);
    ierr = KSPSolveTranspose(jac->ksp[i],bjac->x[i],bjac->y[i]);CHKERRQ(ierr);
    ierr = PetscLogEventEnd(PC_ApplyTransposeOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr);

    ierr = VecResetArray(bjac->x[i]);CHKERRQ(ierr);
    ierr = VecResetArray(bjac->y[i]);CHKERRQ(ierr);
  }
  ierr = VecRestoreArray(x,&xin);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&yin);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetUp_BJacobi_Multiblock"
static PetscErrorCode PCSetUp_BJacobi_Multiblock(PC pc,Mat mat,Mat pmat)
{
  PC_BJacobi            *jac = (PC_BJacobi*)pc->data;
  PetscErrorCode        ierr;
  PetscInt              m,n_local,N,M,start,i;
  const char            *prefix,*pprefix,*mprefix;
  KSP                   ksp;
  Vec                   x,y;
  PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
  PC                    subpc;
  IS                    is;
  MatReuse              scall;

  PetscFunctionBegin;
  ierr = MatGetLocalSize(pc->pmat,&M,&N);CHKERRQ(ierr);

  n_local = jac->n_local;

  if (jac->use_true_local) {
    PetscBool same;
    ierr = PetscObjectTypeCompare((PetscObject)mat,((PetscObject)pmat)->type_name,&same);CHKERRQ(ierr);
    if (!same) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_INCOMP,"Matrices not of same type");
  }

  if (!pc->setupcalled) {
    scall = MAT_INITIAL_MATRIX;

    if (!jac->ksp) {
      pc->ops->reset          = PCReset_BJacobi_Multiblock;
      pc->ops->destroy        = PCDestroy_BJacobi_Multiblock;
      pc->ops->apply          = PCApply_BJacobi_Multiblock;
      pc->ops->applytranspose = PCApplyTranspose_BJacobi_Multiblock;
      pc->ops->setuponblocks  = PCSetUpOnBlocks_BJacobi_Multiblock;

      ierr = PetscNewLog(pc,PC_BJacobi_Multiblock,&bjac);CHKERRQ(ierr);
      ierr = PetscMalloc(n_local*sizeof(KSP),&jac->ksp);CHKERRQ(ierr);
      ierr = PetscLogObjectMemory(pc,n_local*sizeof(KSP));CHKERRQ(ierr);
      ierr = PetscMalloc2(n_local,Vec,&bjac->x,n_local,Vec,&bjac->y);CHKERRQ(ierr);
      ierr = PetscMalloc(n_local*sizeof(PetscInt),&bjac->starts);CHKERRQ(ierr);
      ierr = PetscLogObjectMemory(pc,n_local*sizeof(PetscInt));CHKERRQ(ierr);

      jac->data = (void*)bjac;
      ierr      = PetscMalloc(n_local*sizeof(IS),&bjac->is);CHKERRQ(ierr);
      ierr      = PetscLogObjectMemory(pc,n_local*sizeof(IS));CHKERRQ(ierr);

      for (i=0; i<n_local; i++) {
        ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
        ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
        ierr = PetscLogObjectParent(pc,ksp);CHKERRQ(ierr);
        ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
        ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
        ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
        ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
        ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);

        jac->ksp[i] = ksp;
      }
    } else {
      bjac = (PC_BJacobi_Multiblock*)jac->data;
    }

    start = 0;
    for (i=0; i<n_local; i++) {
      m = jac->l_lens[i];
      /*
         The reason we need to generate these vectors is to serve
         as the right-hand side and solution vector for the solve on the
         block. We do not need to allocate space for the vectors since
         that is provided via VecPlaceArray() just before the call to
         KSPSolve() on the block.
      */
      ierr = VecCreateSeq(PETSC_COMM_SELF,m,&x);CHKERRQ(ierr);
      ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,m,NULL,&y);CHKERRQ(ierr);
      ierr = PetscLogObjectParent(pc,x);CHKERRQ(ierr);
      ierr = PetscLogObjectParent(pc,y);CHKERRQ(ierr);

      bjac->x[i]      = x;
      bjac->y[i]      = y;
      bjac->starts[i] = start;

      ierr        = ISCreateStride(PETSC_COMM_SELF,m,start,1,&is);CHKERRQ(ierr);
      bjac->is[i] = is;
      ierr        = PetscLogObjectParent(pc,is);CHKERRQ(ierr);

      start += m;
    }
  } else {
    bjac = (PC_BJacobi_Multiblock*)jac->data;
    /*
       Destroy the blocks from the previous iteration
    */
    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      ierr = MatDestroyMatrices(n_local,&bjac->pmat);CHKERRQ(ierr);
      if (jac->use_true_local) {
        ierr = MatDestroyMatrices(n_local,&bjac->mat);CHKERRQ(ierr);
      }
      scall = MAT_INITIAL_MATRIX;
    } else scall = MAT_REUSE_MATRIX;
  }

  ierr = MatGetSubMatrices(pmat,n_local,bjac->is,bjac->is,scall,&bjac->pmat);CHKERRQ(ierr);
  if (jac->use_true_local) {
    ierr = PetscObjectGetOptionsPrefix((PetscObject)mat,&mprefix);CHKERRQ(ierr);
    ierr = MatGetSubMatrices(mat,n_local,bjac->is,bjac->is,scall,&bjac->mat);CHKERRQ(ierr);
  }
  /* Return control to the user so that the submatrices can be modified (e.g., to apply
     different boundary conditions for the submatrices than for the global problem) */
  ierr = PCModifySubMatrices(pc,n_local,bjac->is,bjac->is,bjac->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);

  ierr = PetscObjectGetOptionsPrefix((PetscObject)pmat,&pprefix);CHKERRQ(ierr);
  for (i=0; i<n_local; i++) {
    ierr = PetscLogObjectParent(pc,bjac->pmat[i]);CHKERRQ(ierr);
    ierr = PetscObjectSetOptionsPrefix((PetscObject)bjac->pmat[i],pprefix);CHKERRQ(ierr);
    if (jac->use_true_local) {
      ierr = PetscLogObjectParent(pc,bjac->mat[i]);CHKERRQ(ierr);
      ierr = PetscObjectSetOptionsPrefix((PetscObject)bjac->mat[i],mprefix);CHKERRQ(ierr);
      ierr = KSPSetOperators(jac->ksp[i],bjac->mat[i],bjac->pmat[i],pc->flag);CHKERRQ(ierr);
    } else {
      ierr = KSPSetOperators(jac->ksp[i],bjac->pmat[i],bjac->pmat[i],pc->flag);CHKERRQ(ierr);
    }
    if (pc->setfromoptionscalled) {
      ierr = KSPSetFromOptions(jac->ksp[i]);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}

/* ---------------------------------------------------------------------------------------------*/
/*
      These are for a single block with multiple processes;
*/
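
/*
   Illustrative note (assumed usage, not part of the library): the multiproc
   code path below is taken when there are fewer blocks than processes, so
   each block is shared by a subcommunicator.  For example, on 8 processes

       ierr = PCBJacobiSetTotalBlocks(pc,2,NULL);CHKERRQ(ierr);

   (or -pc_bjacobi_blocks 2) gives two blocks, each owned by a 4-process
   contiguous subcommunicator and solved by a parallel sub-KSP.
*/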
#undef __FUNCT__
#define __FUNCT__ "PCReset_BJacobi_Multiproc"
static PetscErrorCode PCReset_BJacobi_Multiproc(PC pc)
{
  PC_BJacobi           *jac   = (PC_BJacobi*)pc->data;
  PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
  PetscErrorCode       ierr;

  PetscFunctionBegin;
  ierr = VecDestroy(&mpjac->ysub);CHKERRQ(ierr);
  ierr = VecDestroy(&mpjac->xsub);CHKERRQ(ierr);
  ierr = MatDestroy(&mpjac->submats);CHKERRQ(ierr);
  if (jac->ksp) {ierr = KSPReset(jac->ksp[0]);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCDestroy_BJacobi_Multiproc"
static PetscErrorCode PCDestroy_BJacobi_Multiproc(PC pc)
{
  PC_BJacobi           *jac   = (PC_BJacobi*)pc->data;
  PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
  PetscErrorCode       ierr;

  PetscFunctionBegin;
  ierr = PCReset_BJacobi_Multiproc(pc);CHKERRQ(ierr);
  ierr = KSPDestroy(&jac->ksp[0]);CHKERRQ(ierr);
  ierr = PetscFree(jac->ksp);CHKERRQ(ierr);
  ierr = PetscSubcommDestroy(&mpjac->psubcomm);CHKERRQ(ierr);

  ierr = PetscFree(mpjac);CHKERRQ(ierr);
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApply_BJacobi_Multiproc"
static PetscErrorCode PCApply_BJacobi_Multiproc(PC pc,Vec x,Vec y)
{
  PC_BJacobi           *jac   = (PC_BJacobi*)pc->data;
  PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
  PetscErrorCode       ierr;
  PetscScalar          *xarray,*yarray;

  PetscFunctionBegin;
  /* place x's and y's local arrays into xsub and ysub */
  ierr = VecGetArray(x,&xarray);CHKERRQ(ierr);
  ierr = VecGetArray(y,&yarray);CHKERRQ(ierr);
  ierr = VecPlaceArray(mpjac->xsub,xarray);CHKERRQ(ierr);
  ierr = VecPlaceArray(mpjac->ysub,yarray);CHKERRQ(ierr);

  /* apply preconditioner on each matrix block */
  ierr = PetscLogEventBegin(PC_ApplyOnMproc,jac->ksp[0],mpjac->xsub,mpjac->ysub,0);CHKERRQ(ierr);
  ierr = KSPSolve(jac->ksp[0],mpjac->xsub,mpjac->ysub);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(PC_ApplyOnMproc,jac->ksp[0],mpjac->xsub,mpjac->ysub,0);CHKERRQ(ierr);

  ierr = VecResetArray(mpjac->xsub);CHKERRQ(ierr);
  ierr = VecResetArray(mpjac->ysub);CHKERRQ(ierr);
  ierr = VecRestoreArray(x,&xarray);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&yarray);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

extern PetscErrorCode MatGetMultiProcBlock_MPIAIJ(Mat,MPI_Comm,MatReuse,Mat*);
#undef __FUNCT__
#define __FUNCT__ "PCSetUp_BJacobi_Multiproc"
static PetscErrorCode PCSetUp_BJacobi_Multiproc(PC pc)
{
  PC_BJacobi           *jac   = (PC_BJacobi*)pc->data;
  PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
  PetscErrorCode       ierr;
  PetscInt             m,n;
  MPI_Comm             comm,subcomm=0;
  const char           *prefix;
  PetscBool            wasSetup = PETSC_TRUE;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
  if (jac->n_local > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Only a single block in a subcommunicator is supported");
  jac->n_local = 1; /* currently only a single block is supported for a subcommunicator */
  if (!pc->setupcalled) {
    wasSetup  = PETSC_FALSE;
    ierr      = PetscNewLog(pc,PC_BJacobi_Multiproc,&mpjac);CHKERRQ(ierr);
    jac->data = (void*)mpjac;

    /* initialize datastructure mpjac */
    if (!jac->psubcomm) {
      /* Create default contiguous subcommunicators if the user does not provide them */
      ierr = PetscSubcommCreate(comm,&jac->psubcomm);CHKERRQ(ierr);
      ierr = PetscSubcommSetNumber(jac->psubcomm,jac->n);CHKERRQ(ierr);
      ierr = PetscSubcommSetType(jac->psubcomm,PETSC_SUBCOMM_CONTIGUOUS);CHKERRQ(ierr);
      ierr = PetscLogObjectMemory(pc,sizeof(PetscSubcomm));CHKERRQ(ierr);
    }
    mpjac->psubcomm = jac->psubcomm;
    subcomm         = mpjac->psubcomm->comm;

    /* Get matrix blocks of pmat */
    ierr = MatGetMultiProcBlock_MPIAIJ(pc->pmat,subcomm,MAT_INITIAL_MATRIX,&mpjac->submats);CHKERRQ(ierr);

    /* create a new KSP of which the processors in each subcomm have a copy */
    ierr = PetscMalloc(sizeof(KSP),&jac->ksp);CHKERRQ(ierr);
    ierr = KSPCreate(subcomm,&jac->ksp[0]);CHKERRQ(ierr);
    ierr = PetscObjectIncrementTabLevel((PetscObject)jac->ksp[0],(PetscObject)pc,1);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(pc,jac->ksp[0]);CHKERRQ(ierr);
    ierr = KSPSetOperators(jac->ksp[0],mpjac->submats,mpjac->submats,pc->flag);CHKERRQ(ierr);
    ierr = KSPGetPC(jac->ksp[0],&mpjac->pc);CHKERRQ(ierr);

    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    ierr = KSPSetOptionsPrefix(jac->ksp[0],prefix);CHKERRQ(ierr);
    ierr = KSPAppendOptionsPrefix(jac->ksp[0],"sub_");CHKERRQ(ierr);
    /*
      PetscMPIInt rank,subsize,subrank;
      ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
      ierr = MPI_Comm_size(subcomm,&subsize);CHKERRQ(ierr);
      ierr = MPI_Comm_rank(subcomm,&subrank);CHKERRQ(ierr);

      ierr = MatGetLocalSize(mpjac->submats,&m,NULL);CHKERRQ(ierr);
      ierr = MatGetSize(mpjac->submats,&n,NULL);CHKERRQ(ierr);
      ierr = PetscSynchronizedPrintf(comm,"[%d], sub-size %d,sub-rank %d\n",rank,subsize,subrank);
      ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
    */

    /* create dummy vectors xsub and ysub */
    ierr = MatGetLocalSize(mpjac->submats,&m,&n);CHKERRQ(ierr);
    ierr = VecCreateMPIWithArray(subcomm,1,n,PETSC_DECIDE,NULL,&mpjac->xsub);CHKERRQ(ierr);
    ierr = VecCreateMPIWithArray(subcomm,1,m,PETSC_DECIDE,NULL,&mpjac->ysub);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(pc,mpjac->xsub);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(pc,mpjac->ysub);CHKERRQ(ierr);

    pc->ops->reset   = PCReset_BJacobi_Multiproc;
    pc->ops->destroy = PCDestroy_BJacobi_Multiproc;
    pc->ops->apply   = PCApply_BJacobi_Multiproc;
  } else { /* pc->setupcalled */
    subcomm = mpjac->psubcomm->comm;
    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      /* destroy old matrix blocks, then get new matrix blocks */
      if (mpjac->submats) {ierr = MatDestroy(&mpjac->submats);CHKERRQ(ierr);}
      ierr = MatGetMultiProcBlock_MPIAIJ(pc->pmat,subcomm,MAT_INITIAL_MATRIX,&mpjac->submats);CHKERRQ(ierr);
    } else {
      ierr = MatGetMultiProcBlock_MPIAIJ(pc->pmat,subcomm,MAT_REUSE_MATRIX,&mpjac->submats);CHKERRQ(ierr);
    }
    ierr = KSPSetOperators(jac->ksp[0],mpjac->submats,mpjac->submats,pc->flag);CHKERRQ(ierr);
  }

  if (!wasSetup && pc->setfromoptionscalled) {
    ierr = KSPSetFromOptions(jac->ksp[0]);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}