1 2 /* 3 Defines a block Jacobi preconditioner. 4 */ 5 #include <petsc-private/pcimpl.h> /*I "petscpc.h" I*/ 6 #include <../src/ksp/pc/impls/bjacobi/bjacobi.h> 7 8 static PetscErrorCode PCSetUp_BJacobi_Singleblock(PC,Mat,Mat); 9 static PetscErrorCode PCSetUp_BJacobi_Multiblock(PC,Mat,Mat); 10 static PetscErrorCode PCSetUp_BJacobi_Multiproc(PC); 11 12 #undef __FUNCT__ 13 #define __FUNCT__ "PCSetUp_BJacobi" 14 static PetscErrorCode PCSetUp_BJacobi(PC pc) 15 { 16 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 17 Mat mat = pc->mat,pmat = pc->pmat; 18 PetscErrorCode ierr,(*f)(Mat,Mat*); 19 PetscInt N,M,start,i,sum,end; 20 PetscInt bs,i_start=-1,i_end=-1; 21 PetscMPIInt rank,size; 22 const char *pprefix,*mprefix; 23 24 PetscFunctionBegin; 25 ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr); 26 ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr); 27 ierr = MatGetLocalSize(pc->pmat,&M,&N);CHKERRQ(ierr); 28 ierr = MatGetBlockSize(pc->pmat,&bs);CHKERRQ(ierr); 29 30 if (jac->n > 0 && jac->n < size) { 31 ierr = PCSetUp_BJacobi_Multiproc(pc);CHKERRQ(ierr); 32 PetscFunctionReturn(0); 33 } 34 35 /* -------------------------------------------------------------------------- 36 Determines the number of blocks assigned to each processor 37 -----------------------------------------------------------------------------*/ 38 39 /* local block count given */ 40 if (jac->n_local > 0 && jac->n < 0) { 41 ierr = MPI_Allreduce(&jac->n_local,&jac->n,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr); 42 if (jac->l_lens) { /* check that user set these correctly */ 43 sum = 0; 44 for (i=0; i<jac->n_local; i++) { 45 if (jac->l_lens[i]/bs*bs !=jac->l_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat blocksize doesn't match block Jacobi layout"); 46 sum += jac->l_lens[i]; 47 } 48 if (sum != M) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local lens set incorrectly"); 49 } else { 50 ierr = PetscMalloc(jac->n_local*sizeof(PetscInt),&jac->l_lens);CHKERRQ(ierr); 51 for (i=0; i<jac->n_local; i++) jac->l_lens[i] = bs*((M/bs)/jac->n_local + (((M/bs) % jac->n_local) > i)); 52 } 53 } else if (jac->n > 0 && jac->n_local < 0) { /* global block count given */ 54 /* global blocks given: determine which ones are local */ 55 if (jac->g_lens) { 56 /* check if the g_lens is has valid entries */ 57 for (i=0; i<jac->n; i++) { 58 if (!jac->g_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Zero block not allowed"); 59 if (jac->g_lens[i]/bs*bs != jac->g_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat blocksize doesn't match block Jacobi layout"); 60 } 61 if (size == 1) { 62 jac->n_local = jac->n; 63 ierr = PetscMalloc(jac->n_local*sizeof(PetscInt),&jac->l_lens);CHKERRQ(ierr); 64 ierr = PetscMemcpy(jac->l_lens,jac->g_lens,jac->n_local*sizeof(PetscInt));CHKERRQ(ierr); 65 /* check that user set these correctly */ 66 sum = 0; 67 for (i=0; i<jac->n_local; i++) sum += jac->l_lens[i]; 68 if (sum != M) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Global lens set incorrectly"); 69 } else { 70 ierr = MatGetOwnershipRange(pc->pmat,&start,&end);CHKERRQ(ierr); 71 /* loop over blocks determing first one owned by me */ 72 sum = 0; 73 for (i=0; i<jac->n+1; i++) { 74 if (sum == start) { i_start = i; goto start_1;} 75 if (i < jac->n) sum += jac->g_lens[i]; 76 } 77 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Block sizes used in PCBJacobiSetTotalBlocks()\nare not compatible with parallel matrix layout"); 78 start_1: 79 for (i=i_start; i<jac->n+1; i++) { 80 if (sum == end) { i_end = i; goto end_1; } 81 if (i < jac->n) sum += jac->g_lens[i]; 82 } 83 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Block sizes used in PCBJacobiSetTotalBlocks()\nare not compatible with parallel matrix layout"); 84 end_1: 85 jac->n_local = i_end - i_start; 86 ierr = PetscMalloc(jac->n_local*sizeof(PetscInt),&jac->l_lens);CHKERRQ(ierr); 87 ierr = PetscMemcpy(jac->l_lens,jac->g_lens+i_start,jac->n_local*sizeof(PetscInt));CHKERRQ(ierr); 88 } 89 } else { /* no global blocks given, determine then using default layout */ 90 jac->n_local = jac->n/size + ((jac->n % size) > rank); 91 ierr = PetscMalloc(jac->n_local*sizeof(PetscInt),&jac->l_lens);CHKERRQ(ierr); 92 for (i=0; i<jac->n_local; i++) { 93 jac->l_lens[i] = ((M/bs)/jac->n_local + (((M/bs) % jac->n_local) > i))*bs; 94 if (!jac->l_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Too many blocks given"); 95 } 96 } 97 } else if (jac->n < 0 && jac->n_local < 0) { /* no blocks given */ 98 jac->n = size; 99 jac->n_local = 1; 100 ierr = PetscMalloc(sizeof(PetscInt),&jac->l_lens);CHKERRQ(ierr); 101 jac->l_lens[0] = M; 102 } else { /* jac->n > 0 && jac->n_local > 0 */ 103 if (!jac->l_lens) { 104 ierr = PetscMalloc(jac->n_local*sizeof(PetscInt),&jac->l_lens);CHKERRQ(ierr); 105 for (i=0; i<jac->n_local; i++) jac->l_lens[i] = bs*((M/bs)/jac->n_local + (((M/bs) % jac->n_local) > i)); 106 } 107 } 108 if (jac->n_local < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Number of blocks is less than number of processors"); 109 110 /* ------------------------- 111 Determines mat and pmat 112 ---------------------------*/ 113 ierr = PetscObjectQueryFunction((PetscObject)pc->mat,"MatGetDiagonalBlock_C",(void (**)(void))&f);CHKERRQ(ierr); 114 if (!f && size == 1) { 115 mat = pc->mat; 116 pmat = pc->pmat; 117 } else { 118 if (jac->use_true_local) { 119 /* use block from true matrix, not preconditioner matrix for local MatMult() */ 120 ierr = MatGetDiagonalBlock(pc->mat,&mat);CHKERRQ(ierr); 121 /* make submatrix have same prefix as entire matrix */ 122 ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->mat,&mprefix);CHKERRQ(ierr); 123 ierr = PetscObjectSetOptionsPrefix((PetscObject)mat,mprefix);CHKERRQ(ierr); 124 } 125 if (pc->pmat != pc->mat || !jac->use_true_local) { 126 ierr = MatGetDiagonalBlock(pc->pmat,&pmat);CHKERRQ(ierr); 127 /* make submatrix have same prefix as entire matrix */ 128 ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr); 129 ierr = PetscObjectSetOptionsPrefix((PetscObject)pmat,pprefix);CHKERRQ(ierr); 130 } else pmat = mat; 131 } 132 133 /* ------ 134 Setup code depends on the number of blocks 135 */ 136 if (jac->n_local == 1) { 137 ierr = PCSetUp_BJacobi_Singleblock(pc,mat,pmat);CHKERRQ(ierr); 138 } else { 139 ierr = PCSetUp_BJacobi_Multiblock(pc,mat,pmat);CHKERRQ(ierr); 140 } 141 PetscFunctionReturn(0); 142 } 143 144 /* Default destroy, if it has never been setup */ 145 #undef __FUNCT__ 146 #define __FUNCT__ "PCDestroy_BJacobi" 147 static PetscErrorCode PCDestroy_BJacobi(PC pc) 148 { 149 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 150 PetscErrorCode ierr; 151 152 PetscFunctionBegin; 153 ierr = PetscFree(jac->g_lens);CHKERRQ(ierr); 154 ierr = PetscFree(jac->l_lens);CHKERRQ(ierr); 155 ierr = PetscFree(pc->data);CHKERRQ(ierr); 156 PetscFunctionReturn(0); 157 } 158 159 #undef __FUNCT__ 160 #define __FUNCT__ "PCSetFromOptions_BJacobi" 161 162 static PetscErrorCode PCSetFromOptions_BJacobi(PC pc) 163 { 164 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 165 PetscErrorCode ierr; 166 PetscInt blocks; 167 PetscBool flg; 168 169 PetscFunctionBegin; 170 ierr = PetscOptionsHead("Block Jacobi options");CHKERRQ(ierr); 171 ierr = PetscOptionsInt("-pc_bjacobi_blocks","Total number of blocks","PCBJacobiSetTotalBlocks",jac->n,&blocks,&flg);CHKERRQ(ierr); 172 if (flg) { 173 ierr = PCBJacobiSetTotalBlocks(pc,blocks,NULL);CHKERRQ(ierr); 174 } 175 flg = PETSC_FALSE; 176 ierr = PetscOptionsBool("-pc_bjacobi_truelocal","Use the true matrix, not preconditioner matrix to define matrix vector product in sub-problems","PCBJacobiSetUseTrueLocal",flg,&flg,NULL);CHKERRQ(ierr); 177 if (flg) { 178 ierr = PCBJacobiSetUseTrueLocal(pc);CHKERRQ(ierr); 179 } 180 ierr = PetscOptionsTail();CHKERRQ(ierr); 181 PetscFunctionReturn(0); 182 } 183 184 #include <petscdraw.h> 185 #undef __FUNCT__ 186 #define __FUNCT__ "PCView_BJacobi" 187 static PetscErrorCode PCView_BJacobi(PC pc,PetscViewer viewer) 188 { 189 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 190 PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data; 191 PetscErrorCode ierr; 192 PetscMPIInt rank; 193 PetscInt i; 194 PetscBool iascii,isstring,isdraw; 195 PetscViewer sviewer; 196 197 PetscFunctionBegin; 198 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr); 199 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr); 200 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr); 201 if (iascii) { 202 if (jac->use_true_local) { 203 ierr = PetscViewerASCIIPrintf(viewer," block Jacobi: using true local matrix, number of blocks = %D\n",jac->n);CHKERRQ(ierr); 204 } 205 ierr = PetscViewerASCIIPrintf(viewer," block Jacobi: number of blocks = %D\n",jac->n);CHKERRQ(ierr); 206 ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr); 207 if (jac->same_local_solves) { 208 ierr = PetscViewerASCIIPrintf(viewer," Local solve is same for all blocks, in the following KSP and PC objects:\n");CHKERRQ(ierr); 209 if (jac->ksp && !jac->psubcomm) { 210 ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr); 211 if (!rank) { 212 ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr); 213 ierr = KSPView(jac->ksp[0],sviewer);CHKERRQ(ierr); 214 ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr); 215 } 216 ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr); 217 } else if (jac->psubcomm && !jac->psubcomm->color) { 218 ierr = PetscViewerASCIIGetStdout(mpjac->psubcomm->comm,&sviewer);CHKERRQ(ierr); 219 ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr); 220 ierr = KSPView(*(jac->ksp),sviewer);CHKERRQ(ierr); 221 ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr); 222 } 223 } else { 224 PetscInt n_global; 225 ierr = MPI_Allreduce(&jac->n_local,&n_global,1,MPIU_INT,MPI_MAX,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr); 226 ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr); 227 ierr = PetscViewerASCIIPrintf(viewer," Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr); 228 ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] number of local blocks = %D, first local block number = %D\n", 229 rank,jac->n_local,jac->first_local);CHKERRQ(ierr); 230 ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr); 231 for (i=0; i<n_global; i++) { 232 ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr); 233 if (i < jac->n_local) { 234 ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] local block number %D\n",rank,i);CHKERRQ(ierr); 235 ierr = KSPView(jac->ksp[i],sviewer);CHKERRQ(ierr); 236 ierr = PetscViewerASCIISynchronizedPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr); 237 } 238 ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr); 239 } 240 ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr); 241 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 242 ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr); 243 } 244 } else if (isstring) { 245 ierr = PetscViewerStringSPrintf(viewer," blks=%D",jac->n);CHKERRQ(ierr); 246 ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr); 247 if (jac->ksp) {ierr = KSPView(jac->ksp[0],sviewer);CHKERRQ(ierr);} 248 ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr); 249 } else if (isdraw) { 250 PetscDraw draw; 251 char str[25]; 252 PetscReal x,y,bottom,h; 253 254 ierr = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr); 255 ierr = PetscDrawGetCurrentPoint(draw,&x,&y);CHKERRQ(ierr); 256 ierr = PetscSNPrintf(str,25,"Number blocks %D",jac->n);CHKERRQ(ierr); 257 ierr = PetscDrawBoxedString(draw,x,y,PETSC_DRAW_RED,PETSC_DRAW_BLACK,str,NULL,&h);CHKERRQ(ierr); 258 bottom = y - h; 259 ierr = PetscDrawPushCurrentPoint(draw,x,bottom);CHKERRQ(ierr); 260 /* warning the communicator on viewer is different then on ksp in parallel */ 261 if (jac->ksp) {ierr = KSPView(jac->ksp[0],viewer);CHKERRQ(ierr);} 262 ierr = PetscDrawPopCurrentPoint(draw);CHKERRQ(ierr); 263 } 264 PetscFunctionReturn(0); 265 } 266 267 /* -------------------------------------------------------------------------------------*/ 268 269 #undef __FUNCT__ 270 #define __FUNCT__ "PCBJacobiSetUseTrueLocal_BJacobi" 271 static PetscErrorCode PCBJacobiSetUseTrueLocal_BJacobi(PC pc) 272 { 273 PC_BJacobi *jac; 274 275 PetscFunctionBegin; 276 jac = (PC_BJacobi*)pc->data; 277 jac->use_true_local = PETSC_TRUE; 278 PetscFunctionReturn(0); 279 } 280 281 #undef __FUNCT__ 282 #define __FUNCT__ "PCBJacobiGetSubKSP_BJacobi" 283 static PetscErrorCode PCBJacobiGetSubKSP_BJacobi(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp) 284 { 285 PC_BJacobi *jac = (PC_BJacobi*)pc->data;; 286 287 PetscFunctionBegin; 288 if (!pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Must call KSPSetUp() or PCSetUp() first"); 289 290 if (n_local) *n_local = jac->n_local; 291 if (first_local) *first_local = jac->first_local; 292 *ksp = jac->ksp; 293 jac->same_local_solves = PETSC_FALSE; /* Assume that local solves are now different; 294 not necessarily true though! This flag is 295 used only for PCView_BJacobi() */ 296 PetscFunctionReturn(0); 297 } 298 299 #undef __FUNCT__ 300 #define __FUNCT__ "PCBJacobiSetTotalBlocks_BJacobi" 301 static PetscErrorCode PCBJacobiSetTotalBlocks_BJacobi(PC pc,PetscInt blocks,PetscInt *lens) 302 { 303 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 304 PetscErrorCode ierr; 305 306 PetscFunctionBegin; 307 if (pc->setupcalled > 0 && jac->n!=blocks) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ORDER,"Cannot alter number of blocks after PCSetUp()/KSPSetUp() has been called"); 308 jac->n = blocks; 309 if (!lens) jac->g_lens = 0; 310 else { 311 ierr = PetscMalloc(blocks*sizeof(PetscInt),&jac->g_lens);CHKERRQ(ierr); 312 ierr = PetscLogObjectMemory(pc,blocks*sizeof(PetscInt));CHKERRQ(ierr); 313 ierr = PetscMemcpy(jac->g_lens,lens,blocks*sizeof(PetscInt));CHKERRQ(ierr); 314 } 315 PetscFunctionReturn(0); 316 } 317 318 #undef __FUNCT__ 319 #define __FUNCT__ "PCBJacobiGetTotalBlocks_BJacobi" 320 static PetscErrorCode PCBJacobiGetTotalBlocks_BJacobi(PC pc, PetscInt *blocks, const PetscInt *lens[]) 321 { 322 PC_BJacobi *jac = (PC_BJacobi*) pc->data; 323 324 PetscFunctionBegin; 325 *blocks = jac->n; 326 if (lens) *lens = jac->g_lens; 327 PetscFunctionReturn(0); 328 } 329 330 #undef __FUNCT__ 331 #define __FUNCT__ "PCBJacobiSetLocalBlocks_BJacobi" 332 static PetscErrorCode PCBJacobiSetLocalBlocks_BJacobi(PC pc,PetscInt blocks,const PetscInt lens[]) 333 { 334 PC_BJacobi *jac; 335 PetscErrorCode ierr; 336 337 PetscFunctionBegin; 338 jac = (PC_BJacobi*)pc->data; 339 340 jac->n_local = blocks; 341 if (!lens) jac->l_lens = 0; 342 else { 343 ierr = PetscMalloc(blocks*sizeof(PetscInt),&jac->l_lens);CHKERRQ(ierr); 344 ierr = PetscLogObjectMemory(pc,blocks*sizeof(PetscInt));CHKERRQ(ierr); 345 ierr = PetscMemcpy(jac->l_lens,lens,blocks*sizeof(PetscInt));CHKERRQ(ierr); 346 } 347 PetscFunctionReturn(0); 348 } 349 350 #undef __FUNCT__ 351 #define __FUNCT__ "PCBJacobiGetLocalBlocks_BJacobi" 352 static PetscErrorCode PCBJacobiGetLocalBlocks_BJacobi(PC pc, PetscInt *blocks, const PetscInt *lens[]) 353 { 354 PC_BJacobi *jac = (PC_BJacobi*) pc->data; 355 356 PetscFunctionBegin; 357 *blocks = jac->n_local; 358 if (lens) *lens = jac->l_lens; 359 PetscFunctionReturn(0); 360 } 361 362 /* -------------------------------------------------------------------------------------*/ 363 364 #undef __FUNCT__ 365 #define __FUNCT__ "PCBJacobiSetUseTrueLocal" 366 /*@ 367 PCBJacobiSetUseTrueLocal - Sets a flag to indicate that the block 368 problem is associated with the linear system matrix instead of the 369 default (where it is associated with the preconditioning matrix). 370 That is, if the local system is solved iteratively then it iterates 371 on the block from the matrix using the block from the preconditioner 372 as the preconditioner for the local block. 373 374 Logically Collective on PC 375 376 Input Parameters: 377 . pc - the preconditioner context 378 379 Options Database Key: 380 . -pc_bjacobi_truelocal - Activates PCBJacobiSetUseTrueLocal() 381 382 Notes: 383 For the common case in which the preconditioning and linear 384 system matrices are identical, this routine is unnecessary. 385 386 Level: intermediate 387 388 .keywords: block, Jacobi, set, true, local, flag 389 390 .seealso: PCSetOperators(), PCBJacobiSetLocalBlocks() 391 @*/ 392 PetscErrorCode PCBJacobiSetUseTrueLocal(PC pc) 393 { 394 PetscErrorCode ierr; 395 396 PetscFunctionBegin; 397 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 398 ierr = PetscTryMethod(pc,"PCBJacobiSetUseTrueLocal_C",(PC),(pc));CHKERRQ(ierr); 399 PetscFunctionReturn(0); 400 } 401 402 #undef __FUNCT__ 403 #define __FUNCT__ "PCBJacobiGetSubKSP" 404 /*@C 405 PCBJacobiGetSubKSP - Gets the local KSP contexts for all blocks on 406 this processor. 407 408 Note Collective 409 410 Input Parameter: 411 . pc - the preconditioner context 412 413 Output Parameters: 414 + n_local - the number of blocks on this processor, or NULL 415 . first_local - the global number of the first block on this processor, or NULL 416 - ksp - the array of KSP contexts 417 418 Notes: 419 After PCBJacobiGetSubKSP() the array of KSP contexts is not to be freed. 420 421 Currently for some matrix implementations only 1 block per processor 422 is supported. 423 424 You must call KSPSetUp() or PCSetUp() before calling PCBJacobiGetSubKSP(). 425 426 Fortran Usage: You must pass in a KSP array that is large enough to contain all the local KSPs. 427 You can call PCBJacobiGetSubKSP(pc,nlocal,firstlocal,NULL_OBJECT,ierr) to determine how large the 428 KSP array must be. 429 430 Level: advanced 431 432 .keywords: block, Jacobi, get, sub, KSP, context 433 434 .seealso: PCBJacobiGetSubKSP() 435 @*/ 436 PetscErrorCode PCBJacobiGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[]) 437 { 438 PetscErrorCode ierr; 439 440 PetscFunctionBegin; 441 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 442 ierr = PetscUseMethod(pc,"PCBJacobiGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));CHKERRQ(ierr); 443 PetscFunctionReturn(0); 444 } 445 446 #undef __FUNCT__ 447 #define __FUNCT__ "PCBJacobiSetTotalBlocks" 448 /*@ 449 PCBJacobiSetTotalBlocks - Sets the global number of blocks for the block 450 Jacobi preconditioner. 451 452 Collective on PC 453 454 Input Parameters: 455 + pc - the preconditioner context 456 . blocks - the number of blocks 457 - lens - [optional] integer array containing the size of each block 458 459 Options Database Key: 460 . -pc_bjacobi_blocks <blocks> - Sets the number of global blocks 461 462 Notes: 463 Currently only a limited number of blocking configurations are supported. 464 All processors sharing the PC must call this routine with the same data. 465 466 Level: intermediate 467 468 .keywords: set, number, Jacobi, global, total, blocks 469 470 .seealso: PCBJacobiSetUseTrueLocal(), PCBJacobiSetLocalBlocks() 471 @*/ 472 PetscErrorCode PCBJacobiSetTotalBlocks(PC pc,PetscInt blocks,const PetscInt lens[]) 473 { 474 PetscErrorCode ierr; 475 476 PetscFunctionBegin; 477 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 478 if (blocks <= 0) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Must have positive blocks"); 479 ierr = PetscTryMethod(pc,"PCBJacobiSetTotalBlocks_C",(PC,PetscInt,const PetscInt[]),(pc,blocks,lens));CHKERRQ(ierr); 480 PetscFunctionReturn(0); 481 } 482 483 #undef __FUNCT__ 484 #define __FUNCT__ "PCBJacobiGetTotalBlocks" 485 /*@C 486 PCBJacobiGetTotalBlocks - Gets the global number of blocks for the block 487 Jacobi preconditioner. 488 489 Not Collective 490 491 Input Parameter: 492 . pc - the preconditioner context 493 494 Output parameters: 495 + blocks - the number of blocks 496 - lens - integer array containing the size of each block 497 498 Level: intermediate 499 500 .keywords: get, number, Jacobi, global, total, blocks 501 502 .seealso: PCBJacobiSetUseTrueLocal(), PCBJacobiGetLocalBlocks() 503 @*/ 504 PetscErrorCode PCBJacobiGetTotalBlocks(PC pc, PetscInt *blocks, const PetscInt *lens[]) 505 { 506 PetscErrorCode ierr; 507 508 PetscFunctionBegin; 509 PetscValidHeaderSpecific(pc, PC_CLASSID,1); 510 PetscValidIntPointer(blocks,2); 511 ierr = PetscUseMethod(pc,"PCBJacobiGetTotalBlocks_C",(PC,PetscInt*, const PetscInt *[]),(pc,blocks,lens));CHKERRQ(ierr); 512 PetscFunctionReturn(0); 513 } 514 515 #undef __FUNCT__ 516 #define __FUNCT__ "PCBJacobiSetLocalBlocks" 517 /*@ 518 PCBJacobiSetLocalBlocks - Sets the local number of blocks for the block 519 Jacobi preconditioner. 520 521 Not Collective 522 523 Input Parameters: 524 + pc - the preconditioner context 525 . blocks - the number of blocks 526 - lens - [optional] integer array containing size of each block 527 528 Note: 529 Currently only a limited number of blocking configurations are supported. 530 531 Level: intermediate 532 533 .keywords: PC, set, number, Jacobi, local, blocks 534 535 .seealso: PCBJacobiSetUseTrueLocal(), PCBJacobiSetTotalBlocks() 536 @*/ 537 PetscErrorCode PCBJacobiSetLocalBlocks(PC pc,PetscInt blocks,const PetscInt lens[]) 538 { 539 PetscErrorCode ierr; 540 541 PetscFunctionBegin; 542 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 543 if (blocks < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must have nonegative blocks"); 544 ierr = PetscTryMethod(pc,"PCBJacobiSetLocalBlocks_C",(PC,PetscInt,const PetscInt []),(pc,blocks,lens));CHKERRQ(ierr); 545 PetscFunctionReturn(0); 546 } 547 548 #undef __FUNCT__ 549 #define __FUNCT__ "PCBJacobiGetLocalBlocks" 550 /*@C 551 PCBJacobiGetLocalBlocks - Gets the local number of blocks for the block 552 Jacobi preconditioner. 553 554 Not Collective 555 556 Input Parameters: 557 + pc - the preconditioner context 558 . blocks - the number of blocks 559 - lens - [optional] integer array containing size of each block 560 561 Note: 562 Currently only a limited number of blocking configurations are supported. 563 564 Level: intermediate 565 566 .keywords: PC, get, number, Jacobi, local, blocks 567 568 .seealso: PCBJacobiSetUseTrueLocal(), PCBJacobiGetTotalBlocks() 569 @*/ 570 PetscErrorCode PCBJacobiGetLocalBlocks(PC pc, PetscInt *blocks, const PetscInt *lens[]) 571 { 572 PetscErrorCode ierr; 573 574 PetscFunctionBegin; 575 PetscValidHeaderSpecific(pc, PC_CLASSID,1); 576 PetscValidIntPointer(blocks,2); 577 ierr = PetscUseMethod(pc,"PCBJacobiGetLocalBlocks_C",(PC,PetscInt*, const PetscInt *[]),(pc,blocks,lens));CHKERRQ(ierr); 578 PetscFunctionReturn(0); 579 } 580 581 /* -----------------------------------------------------------------------------------*/ 582 583 /*MC 584 PCBJACOBI - Use block Jacobi preconditioning, each block is (approximately) solved with 585 its own KSP object. 586 587 Options Database Keys: 588 . -pc_bjacobi_truelocal - Activates PCBJacobiSetUseTrueLocal() 589 590 Notes: Each processor can have one or more blocks, but a block cannot be shared by more 591 than one processor. Defaults to one block per processor. 592 593 To set options on the solvers for each block append -sub_ to all the KSP, KSP, and PC 594 options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly 595 596 To set the options on the solvers separate for each block call PCBJacobiGetSubKSP() 597 and set the options directly on the resulting KSP object (you can access its PC 598 KSPGetPC()) 599 600 Level: beginner 601 602 Concepts: block Jacobi 603 604 Developer Notes: This preconditioner does not currently work with CUDA/CUSP for a couple of reasons. 605 (1) It creates seq vectors as work vectors that should be cusp 606 (2) The use of VecPlaceArray() is not handled properly by CUSP (that is it will not know where 607 the ownership of the vector is so may use wrong values) even if it did know the ownership 608 it may induce extra copy ups and downs. Satish suggests a VecTransplantArray() to handle two 609 vectors sharing the same pointer and handling the CUSP side as well instead of VecGetArray()/VecPlaceArray(). 610 611 612 .seealso: PCCreate(), PCSetType(), PCType (for list of available types), PC, 613 PCASM, PCBJacobiSetUseTrueLocal(), PCBJacobiGetSubKSP(), PCBJacobiSetTotalBlocks(), 614 PCBJacobiSetLocalBlocks(), PCSetModifySubmatrices() 615 M*/ 616 617 EXTERN_C_BEGIN 618 #undef __FUNCT__ 619 #define __FUNCT__ "PCCreate_BJacobi" 620 PetscErrorCode PCCreate_BJacobi(PC pc) 621 { 622 PetscErrorCode ierr; 623 PetscMPIInt rank; 624 PC_BJacobi *jac; 625 626 PetscFunctionBegin; 627 ierr = PetscNewLog(pc,PC_BJacobi,&jac);CHKERRQ(ierr); 628 ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr); 629 630 pc->ops->apply = 0; 631 pc->ops->applytranspose = 0; 632 pc->ops->setup = PCSetUp_BJacobi; 633 pc->ops->destroy = PCDestroy_BJacobi; 634 pc->ops->setfromoptions = PCSetFromOptions_BJacobi; 635 pc->ops->view = PCView_BJacobi; 636 pc->ops->applyrichardson = 0; 637 638 pc->data = (void*)jac; 639 jac->n = -1; 640 jac->n_local = -1; 641 jac->first_local = rank; 642 jac->ksp = 0; 643 jac->use_true_local = PETSC_FALSE; 644 jac->same_local_solves = PETSC_TRUE; 645 jac->g_lens = 0; 646 jac->l_lens = 0; 647 jac->psubcomm = 0; 648 649 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiSetUseTrueLocal_C","PCBJacobiSetUseTrueLocal_BJacobi",PCBJacobiSetUseTrueLocal_BJacobi);CHKERRQ(ierr); 650 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiGetSubKSP_C","PCBJacobiGetSubKSP_BJacobi",PCBJacobiGetSubKSP_BJacobi);CHKERRQ(ierr); 651 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiSetTotalBlocks_C","PCBJacobiSetTotalBlocks_BJacobi",PCBJacobiSetTotalBlocks_BJacobi);CHKERRQ(ierr); 652 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiGetTotalBlocks_C","PCBJacobiGetTotalBlocks_BJacobi",PCBJacobiGetTotalBlocks_BJacobi);CHKERRQ(ierr); 653 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiSetLocalBlocks_C","PCBJacobiSetLocalBlocks_BJacobi",PCBJacobiSetLocalBlocks_BJacobi);CHKERRQ(ierr); 654 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCBJacobiGetLocalBlocks_C","PCBJacobiGetLocalBlocks_BJacobi",PCBJacobiGetLocalBlocks_BJacobi);CHKERRQ(ierr); 655 PetscFunctionReturn(0); 656 } 657 EXTERN_C_END 658 659 /* --------------------------------------------------------------------------------------------*/ 660 /* 661 These are for a single block per processor; works for AIJ, BAIJ; Seq and MPI 662 */ 663 #undef __FUNCT__ 664 #define __FUNCT__ "PCReset_BJacobi_Singleblock" 665 PetscErrorCode PCReset_BJacobi_Singleblock(PC pc) 666 { 667 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 668 PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data; 669 PetscErrorCode ierr; 670 671 PetscFunctionBegin; 672 ierr = KSPReset(jac->ksp[0]);CHKERRQ(ierr); 673 ierr = VecDestroy(&bjac->x);CHKERRQ(ierr); 674 ierr = VecDestroy(&bjac->y);CHKERRQ(ierr); 675 PetscFunctionReturn(0); 676 } 677 678 #undef __FUNCT__ 679 #define __FUNCT__ "PCDestroy_BJacobi_Singleblock" 680 PetscErrorCode PCDestroy_BJacobi_Singleblock(PC pc) 681 { 682 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 683 PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data; 684 PetscErrorCode ierr; 685 686 PetscFunctionBegin; 687 ierr = PCReset_BJacobi_Singleblock(pc);CHKERRQ(ierr); 688 ierr = KSPDestroy(&jac->ksp[0]);CHKERRQ(ierr); 689 ierr = PetscFree(jac->ksp);CHKERRQ(ierr); 690 ierr = PetscFree(jac->l_lens);CHKERRQ(ierr); 691 ierr = PetscFree(jac->g_lens);CHKERRQ(ierr); 692 ierr = PetscFree(bjac);CHKERRQ(ierr); 693 ierr = PetscFree(pc->data);CHKERRQ(ierr); 694 PetscFunctionReturn(0); 695 } 696 697 #undef __FUNCT__ 698 #define __FUNCT__ "PCSetUpOnBlocks_BJacobi_Singleblock" 699 PetscErrorCode PCSetUpOnBlocks_BJacobi_Singleblock(PC pc) 700 { 701 PetscErrorCode ierr; 702 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 703 704 PetscFunctionBegin; 705 ierr = KSPSetUp(jac->ksp[0]);CHKERRQ(ierr); 706 PetscFunctionReturn(0); 707 } 708 709 #undef __FUNCT__ 710 #define __FUNCT__ "PCApply_BJacobi_Singleblock" 711 PetscErrorCode PCApply_BJacobi_Singleblock(PC pc,Vec x,Vec y) 712 { 713 PetscErrorCode ierr; 714 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 715 PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data; 716 PetscScalar *x_array,*y_array; 717 718 PetscFunctionBegin; 719 /* 720 The VecPlaceArray() is to avoid having to copy the 721 y vector into the bjac->x vector. The reason for 722 the bjac->x vector is that we need a sequential vector 723 for the sequential solve. 724 */ 725 ierr = VecGetArray(x,&x_array);CHKERRQ(ierr); 726 ierr = VecGetArray(y,&y_array);CHKERRQ(ierr); 727 ierr = VecPlaceArray(bjac->x,x_array);CHKERRQ(ierr); 728 ierr = VecPlaceArray(bjac->y,y_array);CHKERRQ(ierr); 729 ierr = KSPSolve(jac->ksp[0],bjac->x,bjac->y);CHKERRQ(ierr); 730 ierr = VecResetArray(bjac->x);CHKERRQ(ierr); 731 ierr = VecResetArray(bjac->y);CHKERRQ(ierr); 732 ierr = VecRestoreArray(x,&x_array);CHKERRQ(ierr); 733 ierr = VecRestoreArray(y,&y_array);CHKERRQ(ierr); 734 PetscFunctionReturn(0); 735 } 736 737 #undef __FUNCT__ 738 #define __FUNCT__ "PCApplySymmetricLeft_BJacobi_Singleblock" 739 PetscErrorCode PCApplySymmetricLeft_BJacobi_Singleblock(PC pc,Vec x,Vec y) 740 { 741 PetscErrorCode ierr; 742 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 743 PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data; 744 PetscScalar *x_array,*y_array; 745 PC subpc; 746 747 PetscFunctionBegin; 748 /* 749 The VecPlaceArray() is to avoid having to copy the 750 y vector into the bjac->x vector. The reason for 751 the bjac->x vector is that we need a sequential vector 752 for the sequential solve. 753 */ 754 ierr = VecGetArray(x,&x_array);CHKERRQ(ierr); 755 ierr = VecGetArray(y,&y_array);CHKERRQ(ierr); 756 ierr = VecPlaceArray(bjac->x,x_array);CHKERRQ(ierr); 757 ierr = VecPlaceArray(bjac->y,y_array);CHKERRQ(ierr); 758 /* apply the symmetric left portion of the inner PC operator */ 759 /* note this by-passes the inner KSP and its options completely */ 760 ierr = KSPGetPC(jac->ksp[0],&subpc);CHKERRQ(ierr); 761 ierr = PCApplySymmetricLeft(subpc,bjac->x,bjac->y);CHKERRQ(ierr); 762 ierr = VecResetArray(bjac->x);CHKERRQ(ierr); 763 ierr = VecResetArray(bjac->y);CHKERRQ(ierr); 764 ierr = VecRestoreArray(x,&x_array);CHKERRQ(ierr); 765 ierr = VecRestoreArray(y,&y_array);CHKERRQ(ierr); 766 PetscFunctionReturn(0); 767 } 768 769 #undef __FUNCT__ 770 #define __FUNCT__ "PCApplySymmetricRight_BJacobi_Singleblock" 771 PetscErrorCode PCApplySymmetricRight_BJacobi_Singleblock(PC pc,Vec x,Vec y) 772 { 773 PetscErrorCode ierr; 774 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 775 PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data; 776 PetscScalar *x_array,*y_array; 777 PC subpc; 778 779 PetscFunctionBegin; 780 /* 781 The VecPlaceArray() is to avoid having to copy the 782 y vector into the bjac->x vector. The reason for 783 the bjac->x vector is that we need a sequential vector 784 for the sequential solve. 785 */ 786 ierr = VecGetArray(x,&x_array);CHKERRQ(ierr); 787 ierr = VecGetArray(y,&y_array);CHKERRQ(ierr); 788 ierr = VecPlaceArray(bjac->x,x_array);CHKERRQ(ierr); 789 ierr = VecPlaceArray(bjac->y,y_array);CHKERRQ(ierr); 790 791 /* apply the symmetric right portion of the inner PC operator */ 792 /* note this by-passes the inner KSP and its options completely */ 793 794 ierr = KSPGetPC(jac->ksp[0],&subpc);CHKERRQ(ierr); 795 ierr = PCApplySymmetricRight(subpc,bjac->x,bjac->y);CHKERRQ(ierr); 796 797 ierr = VecRestoreArray(x,&x_array);CHKERRQ(ierr); 798 ierr = VecRestoreArray(y,&y_array);CHKERRQ(ierr); 799 PetscFunctionReturn(0); 800 } 801 802 #undef __FUNCT__ 803 #define __FUNCT__ "PCApplyTranspose_BJacobi_Singleblock" 804 PetscErrorCode PCApplyTranspose_BJacobi_Singleblock(PC pc,Vec x,Vec y) 805 { 806 PetscErrorCode ierr; 807 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 808 PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data; 809 PetscScalar *x_array,*y_array; 810 811 PetscFunctionBegin; 812 /* 813 The VecPlaceArray() is to avoid having to copy the 814 y vector into the bjac->x vector. The reason for 815 the bjac->x vector is that we need a sequential vector 816 for the sequential solve. 817 */ 818 ierr = VecGetArray(x,&x_array);CHKERRQ(ierr); 819 ierr = VecGetArray(y,&y_array);CHKERRQ(ierr); 820 ierr = VecPlaceArray(bjac->x,x_array);CHKERRQ(ierr); 821 ierr = VecPlaceArray(bjac->y,y_array);CHKERRQ(ierr); 822 ierr = KSPSolveTranspose(jac->ksp[0],bjac->x,bjac->y);CHKERRQ(ierr); 823 ierr = VecResetArray(bjac->x);CHKERRQ(ierr); 824 ierr = VecResetArray(bjac->y);CHKERRQ(ierr); 825 ierr = VecRestoreArray(x,&x_array);CHKERRQ(ierr); 826 ierr = VecRestoreArray(y,&y_array);CHKERRQ(ierr); 827 PetscFunctionReturn(0); 828 } 829 830 #undef __FUNCT__ 831 #define __FUNCT__ "PCSetUp_BJacobi_Singleblock" 832 static PetscErrorCode PCSetUp_BJacobi_Singleblock(PC pc,Mat mat,Mat pmat) 833 { 834 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 835 PetscErrorCode ierr; 836 PetscInt m; 837 KSP ksp; 838 PC_BJacobi_Singleblock *bjac; 839 PetscBool wasSetup = PETSC_TRUE; 840 841 PetscFunctionBegin; 842 if (!pc->setupcalled) { 843 const char *prefix; 844 845 if (!jac->ksp) { 846 wasSetup = PETSC_FALSE; 847 848 ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr); 849 ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr); 850 ierr = PetscLogObjectParent(pc,ksp);CHKERRQ(ierr); 851 ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr); 852 ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr); 853 ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr); 854 ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr); 855 856 pc->ops->reset = PCReset_BJacobi_Singleblock; 857 pc->ops->destroy = PCDestroy_BJacobi_Singleblock; 858 pc->ops->apply = PCApply_BJacobi_Singleblock; 859 pc->ops->applysymmetricleft = PCApplySymmetricLeft_BJacobi_Singleblock; 860 pc->ops->applysymmetricright = PCApplySymmetricRight_BJacobi_Singleblock; 861 pc->ops->applytranspose = PCApplyTranspose_BJacobi_Singleblock; 862 pc->ops->setuponblocks = PCSetUpOnBlocks_BJacobi_Singleblock; 863 864 ierr = PetscMalloc(sizeof(KSP),&jac->ksp);CHKERRQ(ierr); 865 jac->ksp[0] = ksp; 866 867 ierr = PetscNewLog(pc,PC_BJacobi_Singleblock,&bjac);CHKERRQ(ierr); 868 jac->data = (void*)bjac; 869 } else { 870 ksp = jac->ksp[0]; 871 bjac = (PC_BJacobi_Singleblock*)jac->data; 872 } 873 874 /* 875 The reason we need to generate these vectors is to serve 876 as the right-hand side and solution vector for the solve on the 877 block. We do not need to allocate space for the vectors since 878 that is provided via VecPlaceArray() just before the call to 879 KSPSolve() on the block. 880 */ 881 ierr = MatGetSize(pmat,&m,&m);CHKERRQ(ierr); 882 ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,m,NULL,&bjac->x);CHKERRQ(ierr); 883 ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,m,NULL,&bjac->y);CHKERRQ(ierr); 884 ierr = PetscLogObjectParent(pc,bjac->x);CHKERRQ(ierr); 885 ierr = PetscLogObjectParent(pc,bjac->y);CHKERRQ(ierr); 886 } else { 887 ksp = jac->ksp[0]; 888 bjac = (PC_BJacobi_Singleblock*)jac->data; 889 } 890 if (jac->use_true_local) { 891 ierr = KSPSetOperators(ksp,mat,pmat,pc->flag);CHKERRQ(ierr); 892 } else { 893 ierr = KSPSetOperators(ksp,pmat,pmat,pc->flag);CHKERRQ(ierr); 894 } 895 if (!wasSetup && pc->setfromoptionscalled) { 896 ierr = KSPSetFromOptions(ksp);CHKERRQ(ierr); 897 } 898 PetscFunctionReturn(0); 899 } 900 901 /* ---------------------------------------------------------------------------------------------*/ 902 #undef __FUNCT__ 903 #define __FUNCT__ "PCReset_BJacobi_Multiblock" 904 PetscErrorCode PCReset_BJacobi_Multiblock(PC pc) 905 { 906 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 907 PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data; 908 PetscErrorCode ierr; 909 PetscInt i; 910 911 PetscFunctionBegin; 912 if (bjac && bjac->pmat) { 913 ierr = MatDestroyMatrices(jac->n_local,&bjac->pmat);CHKERRQ(ierr); 914 if (jac->use_true_local) { 915 ierr = MatDestroyMatrices(jac->n_local,&bjac->mat);CHKERRQ(ierr); 916 } 917 } 918 919 for (i=0; i<jac->n_local; i++) { 920 ierr = KSPReset(jac->ksp[i]);CHKERRQ(ierr); 921 if (bjac && bjac->x) { 922 ierr = VecDestroy(&bjac->x[i]);CHKERRQ(ierr); 923 ierr = VecDestroy(&bjac->y[i]);CHKERRQ(ierr); 924 ierr = ISDestroy(&bjac->is[i]);CHKERRQ(ierr); 925 } 926 } 927 ierr = PetscFree(jac->l_lens);CHKERRQ(ierr); 928 ierr = PetscFree(jac->g_lens);CHKERRQ(ierr); 929 PetscFunctionReturn(0); 930 } 931 932 #undef __FUNCT__ 933 #define __FUNCT__ "PCDestroy_BJacobi_Multiblock" 934 PetscErrorCode PCDestroy_BJacobi_Multiblock(PC pc) 935 { 936 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 937 PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data; 938 PetscErrorCode ierr; 939 PetscInt i; 940 941 PetscFunctionBegin; 942 ierr = PCReset_BJacobi_Multiblock(pc);CHKERRQ(ierr); 943 if (bjac) { 944 ierr = PetscFree2(bjac->x,bjac->y);CHKERRQ(ierr); 945 ierr = PetscFree(bjac->starts);CHKERRQ(ierr); 946 ierr = PetscFree(bjac->is);CHKERRQ(ierr); 947 } 948 ierr = PetscFree(jac->data);CHKERRQ(ierr); 949 for (i=0; i<jac->n_local; i++) { 950 ierr = KSPDestroy(&jac->ksp[i]);CHKERRQ(ierr); 951 } 952 ierr = PetscFree(jac->ksp);CHKERRQ(ierr); 953 ierr = PetscFree(pc->data);CHKERRQ(ierr); 954 PetscFunctionReturn(0); 955 } 956 957 #undef __FUNCT__ 958 #define __FUNCT__ "PCSetUpOnBlocks_BJacobi_Multiblock" 959 PetscErrorCode PCSetUpOnBlocks_BJacobi_Multiblock(PC pc) 960 { 961 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 962 PetscErrorCode ierr; 963 PetscInt i,n_local = jac->n_local; 964 965 PetscFunctionBegin; 966 for (i=0; i<n_local; i++) { 967 ierr = KSPSetUp(jac->ksp[i]);CHKERRQ(ierr); 968 } 969 PetscFunctionReturn(0); 970 } 971 972 /* 973 Preconditioner for block Jacobi 974 */ 975 #undef __FUNCT__ 976 #define __FUNCT__ "PCApply_BJacobi_Multiblock" 977 PetscErrorCode PCApply_BJacobi_Multiblock(PC pc,Vec x,Vec y) 978 { 979 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 980 PetscErrorCode ierr; 981 PetscInt i,n_local = jac->n_local; 982 PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data; 983 PetscScalar *xin,*yin; 984 985 PetscFunctionBegin; 986 ierr = VecGetArray(x,&xin);CHKERRQ(ierr); 987 ierr = VecGetArray(y,&yin);CHKERRQ(ierr); 988 for (i=0; i<n_local; i++) { 989 /* 990 To avoid copying the subvector from x into a workspace we instead 991 make the workspace vector array point to the subpart of the array of 992 the global vector. 993 */ 994 ierr = VecPlaceArray(bjac->x[i],xin+bjac->starts[i]);CHKERRQ(ierr); 995 ierr = VecPlaceArray(bjac->y[i],yin+bjac->starts[i]);CHKERRQ(ierr); 996 997 ierr = PetscLogEventBegin(PC_ApplyOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr); 998 ierr = KSPSolve(jac->ksp[i],bjac->x[i],bjac->y[i]);CHKERRQ(ierr); 999 ierr = PetscLogEventEnd(PC_ApplyOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr); 1000 1001 ierr = VecResetArray(bjac->x[i]);CHKERRQ(ierr); 1002 ierr = VecResetArray(bjac->y[i]);CHKERRQ(ierr); 1003 } 1004 ierr = VecRestoreArray(x,&xin);CHKERRQ(ierr); 1005 ierr = VecRestoreArray(y,&yin);CHKERRQ(ierr); 1006 PetscFunctionReturn(0); 1007 } 1008 1009 /* 1010 Preconditioner for block Jacobi 1011 */ 1012 #undef __FUNCT__ 1013 #define __FUNCT__ "PCApplyTranspose_BJacobi_Multiblock" 1014 PetscErrorCode PCApplyTranspose_BJacobi_Multiblock(PC pc,Vec x,Vec y) 1015 { 1016 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 1017 PetscErrorCode ierr; 1018 PetscInt i,n_local = jac->n_local; 1019 PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data; 1020 PetscScalar *xin,*yin; 1021 1022 PetscFunctionBegin; 1023 ierr = VecGetArray(x,&xin);CHKERRQ(ierr); 1024 ierr = VecGetArray(y,&yin);CHKERRQ(ierr); 1025 for (i=0; i<n_local; i++) { 1026 /* 1027 To avoid copying the subvector from x into a workspace we instead 1028 make the workspace vector array point to the subpart of the array of 1029 the global vector. 1030 */ 1031 ierr = VecPlaceArray(bjac->x[i],xin+bjac->starts[i]);CHKERRQ(ierr); 1032 ierr = VecPlaceArray(bjac->y[i],yin+bjac->starts[i]);CHKERRQ(ierr); 1033 1034 ierr = PetscLogEventBegin(PC_ApplyTransposeOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr); 1035 ierr = KSPSolveTranspose(jac->ksp[i],bjac->x[i],bjac->y[i]);CHKERRQ(ierr); 1036 ierr = PetscLogEventEnd(PC_ApplyTransposeOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr); 1037 1038 ierr = VecResetArray(bjac->x[i]);CHKERRQ(ierr); 1039 ierr = VecResetArray(bjac->y[i]);CHKERRQ(ierr); 1040 } 1041 ierr = VecRestoreArray(x,&xin);CHKERRQ(ierr); 1042 ierr = VecRestoreArray(y,&yin);CHKERRQ(ierr); 1043 PetscFunctionReturn(0); 1044 } 1045 1046 #undef __FUNCT__ 1047 #define __FUNCT__ "PCSetUp_BJacobi_Multiblock" 1048 static PetscErrorCode PCSetUp_BJacobi_Multiblock(PC pc,Mat mat,Mat pmat) 1049 { 1050 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 1051 PetscErrorCode ierr; 1052 PetscInt m,n_local,N,M,start,i; 1053 const char *prefix,*pprefix,*mprefix; 1054 KSP ksp; 1055 Vec x,y; 1056 PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data; 1057 PC subpc; 1058 IS is; 1059 MatReuse scall; 1060 1061 PetscFunctionBegin; 1062 ierr = MatGetLocalSize(pc->pmat,&M,&N);CHKERRQ(ierr); 1063 1064 n_local = jac->n_local; 1065 1066 if (jac->use_true_local) { 1067 PetscBool same; 1068 ierr = PetscObjectTypeCompare((PetscObject)mat,((PetscObject)pmat)->type_name,&same);CHKERRQ(ierr); 1069 if (!same) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_INCOMP,"Matrices not of same type"); 1070 } 1071 1072 if (!pc->setupcalled) { 1073 scall = MAT_INITIAL_MATRIX; 1074 1075 if (!jac->ksp) { 1076 pc->ops->reset = PCReset_BJacobi_Multiblock; 1077 pc->ops->destroy = PCDestroy_BJacobi_Multiblock; 1078 pc->ops->apply = PCApply_BJacobi_Multiblock; 1079 pc->ops->applytranspose= PCApplyTranspose_BJacobi_Multiblock; 1080 pc->ops->setuponblocks = PCSetUpOnBlocks_BJacobi_Multiblock; 1081 1082 ierr = PetscNewLog(pc,PC_BJacobi_Multiblock,&bjac);CHKERRQ(ierr); 1083 ierr = PetscMalloc(n_local*sizeof(KSP),&jac->ksp);CHKERRQ(ierr); 1084 ierr = PetscLogObjectMemory(pc,sizeof(n_local*sizeof(KSP)));CHKERRQ(ierr); 1085 ierr = PetscMalloc2(n_local,Vec,&bjac->x,n_local,Vec,&bjac->y);CHKERRQ(ierr); 1086 ierr = PetscMalloc(n_local*sizeof(PetscScalar),&bjac->starts);CHKERRQ(ierr); 1087 ierr = PetscLogObjectMemory(pc,sizeof(n_local*sizeof(PetscScalar)));CHKERRQ(ierr); 1088 1089 jac->data = (void*)bjac; 1090 ierr = PetscMalloc(n_local*sizeof(IS),&bjac->is);CHKERRQ(ierr); 1091 ierr = PetscLogObjectMemory(pc,sizeof(n_local*sizeof(IS)));CHKERRQ(ierr); 1092 1093 for (i=0; i<n_local; i++) { 1094 ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr); 1095 ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr); 1096 ierr = PetscLogObjectParent(pc,ksp);CHKERRQ(ierr); 1097 ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr); 1098 ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr); 1099 ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr); 1100 ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr); 1101 ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr); 1102 1103 jac->ksp[i] = ksp; 1104 } 1105 } else { 1106 bjac = (PC_BJacobi_Multiblock*)jac->data; 1107 } 1108 1109 start = 0; 1110 for (i=0; i<n_local; i++) { 1111 m = jac->l_lens[i]; 1112 /* 1113 The reason we need to generate these vectors is to serve 1114 as the right-hand side and solution vector for the solve on the 1115 block. We do not need to allocate space for the vectors since 1116 that is provided via VecPlaceArray() just before the call to 1117 KSPSolve() on the block. 1118 1119 */ 1120 ierr = VecCreateSeq(PETSC_COMM_SELF,m,&x);CHKERRQ(ierr); 1121 ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,m,NULL,&y);CHKERRQ(ierr); 1122 ierr = PetscLogObjectParent(pc,x);CHKERRQ(ierr); 1123 ierr = PetscLogObjectParent(pc,y);CHKERRQ(ierr); 1124 1125 bjac->x[i] = x; 1126 bjac->y[i] = y; 1127 bjac->starts[i] = start; 1128 1129 ierr = ISCreateStride(PETSC_COMM_SELF,m,start,1,&is);CHKERRQ(ierr); 1130 bjac->is[i] = is; 1131 ierr = PetscLogObjectParent(pc,is);CHKERRQ(ierr); 1132 1133 start += m; 1134 } 1135 } else { 1136 bjac = (PC_BJacobi_Multiblock*)jac->data; 1137 /* 1138 Destroy the blocks from the previous iteration 1139 */ 1140 if (pc->flag == DIFFERENT_NONZERO_PATTERN) { 1141 ierr = MatDestroyMatrices(n_local,&bjac->pmat);CHKERRQ(ierr); 1142 if (jac->use_true_local) { 1143 ierr = MatDestroyMatrices(n_local,&bjac->mat);CHKERRQ(ierr); 1144 } 1145 scall = MAT_INITIAL_MATRIX; 1146 } else scall = MAT_REUSE_MATRIX; 1147 } 1148 1149 ierr = MatGetSubMatrices(pmat,n_local,bjac->is,bjac->is,scall,&bjac->pmat);CHKERRQ(ierr); 1150 if (jac->use_true_local) { 1151 ierr = PetscObjectGetOptionsPrefix((PetscObject)mat,&mprefix);CHKERRQ(ierr); 1152 ierr = MatGetSubMatrices(mat,n_local,bjac->is,bjac->is,scall,&bjac->mat);CHKERRQ(ierr); 1153 } 1154 /* Return control to the user so that the submatrices can be modified (e.g., to apply 1155 different boundary conditions for the submatrices than for the global problem) */ 1156 ierr = PCModifySubMatrices(pc,n_local,bjac->is,bjac->is,bjac->pmat,pc->modifysubmatricesP);CHKERRQ(ierr); 1157 1158 ierr = PetscObjectGetOptionsPrefix((PetscObject)pmat,&pprefix);CHKERRQ(ierr); 1159 for (i=0; i<n_local; i++) { 1160 ierr = PetscLogObjectParent(pc,bjac->pmat[i]);CHKERRQ(ierr); 1161 ierr = PetscObjectSetOptionsPrefix((PetscObject)bjac->pmat[i],pprefix);CHKERRQ(ierr); 1162 if (jac->use_true_local) { 1163 ierr = PetscLogObjectParent(pc,bjac->mat[i]);CHKERRQ(ierr); 1164 ierr = PetscObjectSetOptionsPrefix((PetscObject)bjac->mat[i],mprefix);CHKERRQ(ierr); 1165 ierr = KSPSetOperators(jac->ksp[i],bjac->mat[i],bjac->pmat[i],pc->flag);CHKERRQ(ierr); 1166 } else { 1167 ierr = KSPSetOperators(jac->ksp[i],bjac->pmat[i],bjac->pmat[i],pc->flag);CHKERRQ(ierr); 1168 } 1169 if (pc->setfromoptionscalled) { 1170 ierr = KSPSetFromOptions(jac->ksp[i]);CHKERRQ(ierr); 1171 } 1172 } 1173 PetscFunctionReturn(0); 1174 } 1175 1176 /* ---------------------------------------------------------------------------------------------*/ 1177 /* 1178 These are for a single block with multiple processes; 1179 */ 1180 #undef __FUNCT__ 1181 #define __FUNCT__ "PCReset_BJacobi_Multiproc" 1182 static PetscErrorCode PCReset_BJacobi_Multiproc(PC pc) 1183 { 1184 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 1185 PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data; 1186 PetscErrorCode ierr; 1187 1188 PetscFunctionBegin; 1189 ierr = VecDestroy(&mpjac->ysub);CHKERRQ(ierr); 1190 ierr = VecDestroy(&mpjac->xsub);CHKERRQ(ierr); 1191 ierr = MatDestroy(&mpjac->submats);CHKERRQ(ierr); 1192 if (jac->ksp) {ierr = KSPReset(jac->ksp[0]);CHKERRQ(ierr);} 1193 PetscFunctionReturn(0); 1194 } 1195 1196 #undef __FUNCT__ 1197 #define __FUNCT__ "PCDestroy_BJacobi_Multiproc" 1198 static PetscErrorCode PCDestroy_BJacobi_Multiproc(PC pc) 1199 { 1200 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 1201 PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data; 1202 PetscErrorCode ierr; 1203 1204 PetscFunctionBegin; 1205 ierr = PCReset_BJacobi_Multiproc(pc);CHKERRQ(ierr); 1206 ierr = KSPDestroy(&jac->ksp[0]);CHKERRQ(ierr); 1207 ierr = PetscFree(jac->ksp);CHKERRQ(ierr); 1208 ierr = PetscSubcommDestroy(&mpjac->psubcomm);CHKERRQ(ierr); 1209 1210 ierr = PetscFree(mpjac);CHKERRQ(ierr); 1211 ierr = PetscFree(pc->data);CHKERRQ(ierr); 1212 PetscFunctionReturn(0); 1213 } 1214 1215 #undef __FUNCT__ 1216 #define __FUNCT__ "PCApply_BJacobi_Multiproc" 1217 static PetscErrorCode PCApply_BJacobi_Multiproc(PC pc,Vec x,Vec y) 1218 { 1219 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 1220 PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data; 1221 PetscErrorCode ierr; 1222 PetscScalar *xarray,*yarray; 1223 1224 PetscFunctionBegin; 1225 /* place x's and y's local arrays into xsub and ysub */ 1226 ierr = VecGetArray(x,&xarray);CHKERRQ(ierr); 1227 ierr = VecGetArray(y,&yarray);CHKERRQ(ierr); 1228 ierr = VecPlaceArray(mpjac->xsub,xarray);CHKERRQ(ierr); 1229 ierr = VecPlaceArray(mpjac->ysub,yarray);CHKERRQ(ierr); 1230 1231 /* apply preconditioner on each matrix block */ 1232 ierr = PetscLogEventBegin(PC_ApplyOnMproc,jac->ksp[0],mpjac->xsub,mpjac->ysub,0);CHKERRQ(ierr); 1233 ierr = KSPSolve(jac->ksp[0],mpjac->xsub,mpjac->ysub);CHKERRQ(ierr); 1234 ierr = PetscLogEventEnd(PC_ApplyOnMproc,jac->ksp[0],mpjac->xsub,mpjac->ysub,0);CHKERRQ(ierr); 1235 1236 ierr = VecResetArray(mpjac->xsub);CHKERRQ(ierr); 1237 ierr = VecResetArray(mpjac->ysub);CHKERRQ(ierr); 1238 ierr = VecRestoreArray(x,&xarray);CHKERRQ(ierr); 1239 ierr = VecRestoreArray(y,&yarray);CHKERRQ(ierr); 1240 PetscFunctionReturn(0); 1241 } 1242 1243 PETSC_EXTERN PetscErrorCode MatGetMultiProcBlock_MPIAIJ(Mat,MPI_Comm,MatReuse,Mat*); 1244 #undef __FUNCT__ 1245 #define __FUNCT__ "PCSetUp_BJacobi_Multiproc" 1246 static PetscErrorCode PCSetUp_BJacobi_Multiproc(PC pc) 1247 { 1248 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 1249 PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data; 1250 PetscErrorCode ierr; 1251 PetscInt m,n; 1252 MPI_Comm comm,subcomm=0; 1253 const char *prefix; 1254 PetscBool wasSetup = PETSC_TRUE; 1255 1256 PetscFunctionBegin; 1257 ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr); 1258 if (jac->n_local > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Only a single block in a subcommunicator is supported"); 1259 jac->n_local = 1; /* currently only a single block is supported for a subcommunicator */ 1260 if (!pc->setupcalled) { 1261 wasSetup = PETSC_FALSE; 1262 ierr = PetscNewLog(pc,PC_BJacobi_Multiproc,&mpjac);CHKERRQ(ierr); 1263 jac->data = (void*)mpjac; 1264 1265 /* initialize datastructure mpjac */ 1266 if (!jac->psubcomm) { 1267 /* Create default contiguous subcommunicatiors if user does not provide them */ 1268 ierr = PetscSubcommCreate(comm,&jac->psubcomm);CHKERRQ(ierr); 1269 ierr = PetscSubcommSetNumber(jac->psubcomm,jac->n);CHKERRQ(ierr); 1270 ierr = PetscSubcommSetType(jac->psubcomm,PETSC_SUBCOMM_CONTIGUOUS);CHKERRQ(ierr); 1271 ierr = PetscLogObjectMemory(pc,sizeof(PetscSubcomm));CHKERRQ(ierr); 1272 } 1273 mpjac->psubcomm = jac->psubcomm; 1274 subcomm = mpjac->psubcomm->comm; 1275 1276 /* Get matrix blocks of pmat */ 1277 ierr = MatGetMultiProcBlock_MPIAIJ(pc->pmat,subcomm,MAT_INITIAL_MATRIX,&mpjac->submats);CHKERRQ(ierr); 1278 1279 /* create a new PC that processors in each subcomm have copy of */ 1280 ierr = PetscMalloc(sizeof(KSP),&jac->ksp);CHKERRQ(ierr); 1281 ierr = KSPCreate(subcomm,&jac->ksp[0]);CHKERRQ(ierr); 1282 ierr = PetscObjectIncrementTabLevel((PetscObject)jac->ksp[0],(PetscObject)pc,1);CHKERRQ(ierr); 1283 ierr = PetscLogObjectParent(pc,jac->ksp[0]);CHKERRQ(ierr); 1284 ierr = KSPSetOperators(jac->ksp[0],mpjac->submats,mpjac->submats,pc->flag);CHKERRQ(ierr); 1285 ierr = KSPGetPC(jac->ksp[0],&mpjac->pc);CHKERRQ(ierr); 1286 1287 ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr); 1288 ierr = KSPSetOptionsPrefix(jac->ksp[0],prefix);CHKERRQ(ierr); 1289 ierr = KSPAppendOptionsPrefix(jac->ksp[0],"sub_");CHKERRQ(ierr); 1290 /* 1291 PetscMPIInt rank,subsize,subrank; 1292 ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); 1293 ierr = MPI_Comm_size(subcomm,&subsize);CHKERRQ(ierr); 1294 ierr = MPI_Comm_rank(subcomm,&subrank);CHKERRQ(ierr); 1295 1296 ierr = MatGetLocalSize(mpjac->submats,&m,NULL);CHKERRQ(ierr); 1297 ierr = MatGetSize(mpjac->submats,&n,NULL);CHKERRQ(ierr); 1298 ierr = PetscSynchronizedPrintf(comm,"[%d], sub-size %d,sub-rank %d\n",rank,subsize,subrank); 1299 ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr); 1300 */ 1301 1302 /* create dummy vectors xsub and ysub */ 1303 ierr = MatGetLocalSize(mpjac->submats,&m,&n);CHKERRQ(ierr); 1304 ierr = VecCreateMPIWithArray(subcomm,1,n,PETSC_DECIDE,NULL,&mpjac->xsub);CHKERRQ(ierr); 1305 ierr = VecCreateMPIWithArray(subcomm,1,m,PETSC_DECIDE,NULL,&mpjac->ysub);CHKERRQ(ierr); 1306 ierr = PetscLogObjectParent(pc,mpjac->xsub);CHKERRQ(ierr); 1307 ierr = PetscLogObjectParent(pc,mpjac->ysub);CHKERRQ(ierr); 1308 1309 pc->ops->reset = PCReset_BJacobi_Multiproc; 1310 pc->ops->destroy = PCDestroy_BJacobi_Multiproc; 1311 pc->ops->apply = PCApply_BJacobi_Multiproc; 1312 } else { /* pc->setupcalled */ 1313 subcomm = mpjac->psubcomm->comm; 1314 if (pc->flag == DIFFERENT_NONZERO_PATTERN) { 1315 /* destroy old matrix blocks, then get new matrix blocks */ 1316 if (mpjac->submats) {ierr = MatDestroy(&mpjac->submats);CHKERRQ(ierr);} 1317 ierr = MatGetMultiProcBlock_MPIAIJ(pc->pmat,subcomm,MAT_INITIAL_MATRIX,&mpjac->submats);CHKERRQ(ierr); 1318 } else { 1319 ierr = MatGetMultiProcBlock_MPIAIJ(pc->pmat,subcomm,MAT_REUSE_MATRIX,&mpjac->submats);CHKERRQ(ierr); 1320 } 1321 ierr = KSPSetOperators(jac->ksp[0],mpjac->submats,mpjac->submats,pc->flag);CHKERRQ(ierr); 1322 } 1323 1324 if (!wasSetup && pc->setfromoptionscalled) { 1325 ierr = KSPSetFromOptions(jac->ksp[0]);CHKERRQ(ierr); 1326 } 1327 PetscFunctionReturn(0); 1328 } 1329