
/*
   Defines a block Jacobi preconditioner.
*/
#include <private/pcimpl.h>              /*I "petscpc.h" I*/
#include <../src/ksp/pc/impls/bjacobi/bjacobi.h>

static PetscErrorCode PCSetUp_BJacobi_Singleblock(PC,Mat,Mat);
static PetscErrorCode PCSetUp_BJacobi_Multiblock(PC,Mat,Mat);
static PetscErrorCode PCSetUp_BJacobi_Multiproc(PC);

#undef __FUNCT__
#define __FUNCT__ "PCSetUp_BJacobi"
static PetscErrorCode PCSetUp_BJacobi(PC pc)
{
  PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
  Mat            mat = pc->mat,pmat = pc->pmat;
  PetscErrorCode ierr,(*f)(Mat,PetscBool*,MatReuse,Mat*);
  PetscInt       N,M,start,i,sum,end;
  PetscInt       bs,i_start=-1,i_end=-1;
  PetscMPIInt    rank,size;
  const char     *pprefix,*mprefix;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr);
  ierr = MatGetLocalSize(pc->pmat,&M,&N);CHKERRQ(ierr);
  ierr = MatGetBlockSize(pc->pmat,&bs);CHKERRQ(ierr);

  if (jac->n > 0 && jac->n < size) {
    ierr = PCSetUp_BJacobi_Multiproc(pc);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  /* --------------------------------------------------------------------------
      Determine the number of blocks assigned to each processor
  -----------------------------------------------------------------------------*/

  /* local block count given */
  if (jac->n_local > 0 && jac->n < 0) {
    ierr = MPI_Allreduce(&jac->n_local,&jac->n,1,MPIU_INT,MPI_SUM,((PetscObject)pc)->comm);CHKERRQ(ierr);
    if (jac->l_lens) { /* check that user set these correctly */
      sum = 0;
      for (i=0; i<jac->n_local; i++) {
        if (jac->l_lens[i]/bs*bs != jac->l_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat blocksize doesn't match block Jacobi layout");
        sum += jac->l_lens[i];
      }
      if (sum != M) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local lens set incorrectly");
    } else {
      ierr = PetscMalloc(jac->n_local*sizeof(PetscInt),&jac->l_lens);CHKERRQ(ierr);
      for (i=0; i<jac->n_local; i++) {
        jac->l_lens[i] = bs*((M/bs)/jac->n_local + (((M/bs) % jac->n_local) > i));
      }
    }
  } else if (jac->n > 0 && jac->n_local < 0) { /* global block count given */
    /* global blocks given: determine which ones are local */
    if (jac->g_lens) {
      /* check that g_lens has valid entries */
      for (i=0; i<jac->n; i++) {
        if (!jac->g_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Zero block not allowed");
        if (jac->g_lens[i]/bs*bs != jac->g_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat blocksize doesn't match block Jacobi layout");
      }
      if (size == 1) {
        jac->n_local = jac->n;
        ierr = PetscMalloc(jac->n_local*sizeof(PetscInt),&jac->l_lens);CHKERRQ(ierr);
        ierr = PetscMemcpy(jac->l_lens,jac->g_lens,jac->n_local*sizeof(PetscInt));CHKERRQ(ierr);
        /* check that user set these correctly */
        sum = 0;
        for (i=0; i<jac->n_local; i++) sum += jac->l_lens[i];
        if (sum != M) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Global lens set incorrectly");
      } else {
        ierr = MatGetOwnershipRange(pc->pmat,&start,&end);CHKERRQ(ierr);
        /* loop over blocks determining the first one owned by me */
        sum = 0;
        for (i=0; i<jac->n+1; i++) {
          if (sum == start) { i_start = i; goto start_1;}
          if (i < jac->n) sum += jac->g_lens[i];
        }
        SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Block sizes used in PCBJacobiSetTotalBlocks()\nare not compatible with parallel matrix layout");
        start_1:
        for (i=i_start; i<jac->n+1; i++) {
          if (sum == end) { i_end = i; goto end_1; }
          if (i < jac->n) sum += jac->g_lens[i];
        }
        SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Block sizes used in PCBJacobiSetTotalBlocks()\nare not compatible with parallel matrix layout");
        end_1:
        jac->n_local = i_end - i_start;
        ierr = PetscMalloc(jac->n_local*sizeof(PetscInt),&jac->l_lens);CHKERRQ(ierr);
        ierr = PetscMemcpy(jac->l_lens,jac->g_lens+i_start,jac->n_local*sizeof(PetscInt));CHKERRQ(ierr);
      }
    } else { /* no global blocks given, determine them using the default layout */
      jac->n_local = jac->n/size + ((jac->n % size) > rank);
      ierr = PetscMalloc(jac->n_local*sizeof(PetscInt),&jac->l_lens);CHKERRQ(ierr);
      for (i=0; i<jac->n_local; i++) {
        jac->l_lens[i] = ((M/bs)/jac->n_local + (((M/bs) % jac->n_local) > i))*bs;
        if (!jac->l_lens[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Too many blocks given");
      }
    }
  } else if (jac->n < 0 && jac->n_local < 0) { /* no blocks given */
    jac->n         = size;
    jac->n_local   = 1;
    ierr           = PetscMalloc(sizeof(PetscInt),&jac->l_lens);CHKERRQ(ierr);
    jac->l_lens[0] = M;
  }
  if (jac->n_local < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Number of blocks is less than number of processors");

  /* -------------------------
     Determine mat and pmat
  ---------------------------*/
  ierr = PetscObjectQueryFunction((PetscObject)pc->mat,"MatGetDiagonalBlock_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (size == 1 && !f) {
    mat  = pc->mat;
    pmat = pc->pmat;
  } else {
    PetscBool iscopy;
    MatReuse  scall;

    if (jac->use_true_local) {
      /* use block from true matrix, not preconditioner matrix for local MatMult() */
      scall = MAT_INITIAL_MATRIX;
      if (pc->setupcalled) {
        if (pc->flag == SAME_NONZERO_PATTERN) {
          if (jac->tp_mat) {
            scall = MAT_REUSE_MATRIX;
            mat   = jac->tp_mat;
          }
        } else {
          ierr = MatDestroy(&jac->tp_mat);CHKERRQ(ierr);
        }
      }
      if (!f) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_SUP,"This matrix does not support getting diagonal block");
      ierr = (*f)(pc->mat,&iscopy,scall,&mat);CHKERRQ(ierr);
      /* make submatrix have same prefix as entire matrix */
      ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->mat,&mprefix);CHKERRQ(ierr);
      ierr = PetscObjectSetOptionsPrefix((PetscObject)mat,mprefix);CHKERRQ(ierr);
      if (iscopy) {
        jac->tp_mat = mat;
      }
    }
    if (pc->pmat != pc->mat || !jac->use_true_local) {
      scall = MAT_INITIAL_MATRIX;
      if (pc->setupcalled) {
        if (pc->flag == SAME_NONZERO_PATTERN) {
          if (jac->tp_pmat) {
            scall = MAT_REUSE_MATRIX;
            pmat  = jac->tp_pmat;
          }
        } else {
          ierr = MatDestroy(&jac->tp_pmat);CHKERRQ(ierr);
        }
      }
      ierr = PetscObjectQueryFunction((PetscObject)pc->pmat,"MatGetDiagonalBlock_C",(void (**)(void))&f);CHKERRQ(ierr);
      if (!f) {
        const char *type;
        ierr = PetscObjectGetType((PetscObject)pc->pmat,&type);CHKERRQ(ierr);
        SETERRQ1(((PetscObject)pc)->comm,PETSC_ERR_SUP,"This matrix type, %s, does not support getting diagonal block",type);
      }
      ierr = (*f)(pc->pmat,&iscopy,scall,&pmat);CHKERRQ(ierr);
      /* make submatrix have same prefix as entire matrix */
      ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr);
      ierr = PetscObjectSetOptionsPrefix((PetscObject)pmat,pprefix);CHKERRQ(ierr);
      if (iscopy) {
        jac->tp_pmat = pmat;
      }
    } else {
      pmat = mat;
    }
  }

  /* ------
     Setup code depends on the number of blocks
  */
  if (jac->n_local == 1) {
    ierr = PCSetUp_BJacobi_Singleblock(pc,mat,pmat);CHKERRQ(ierr);
  } else {
    ierr = PCSetUp_BJacobi_Multiblock(pc,mat,pmat);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
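
/*
   Worked illustration of the default block-length formula used above
   (hypothetical sizes, for exposition only): with M = 10 local rows,
   bs = 1, and jac->n_local = 3, the expression
   bs*((M/bs)/n_local + (((M/bs) % n_local) > i)) yields l_lens = {4,3,3};
   the remainder rows are assigned to the leading blocks.
*/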

/* Default destroy, if it has never been set up */
#undef __FUNCT__
#define __FUNCT__ "PCDestroy_BJacobi"
static PetscErrorCode PCDestroy_BJacobi(PC pc)
{
  PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscFree(jac->g_lens);CHKERRQ(ierr);
  ierr = PetscFree(jac->l_lens);CHKERRQ(ierr);
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetFromOptions_BJacobi"
static PetscErrorCode PCSetFromOptions_BJacobi(PC pc)
{
  PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
  PetscErrorCode ierr;
  PetscInt       blocks;
  PetscBool      flg;

  PetscFunctionBegin;
  ierr = PetscOptionsHead("Block Jacobi options");CHKERRQ(ierr);
  ierr = PetscOptionsInt("-pc_bjacobi_blocks","Total number of blocks","PCBJacobiSetTotalBlocks",jac->n,&blocks,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PCBJacobiSetTotalBlocks(pc,blocks,PETSC_NULL);CHKERRQ(ierr);
  }
  flg  = PETSC_FALSE;
  ierr = PetscOptionsBool("-pc_bjacobi_truelocal","Use the true matrix, not the preconditioner matrix, to define the matrix-vector product in the sub-problems","PCBJacobiSetUseTrueLocal",flg,&flg,PETSC_NULL);CHKERRQ(ierr);
  if (flg) {
    ierr = PCBJacobiSetUseTrueLocal(pc);CHKERRQ(ierr);
  }
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCView_BJacobi"
static PetscErrorCode PCView_BJacobi(PC pc,PetscViewer viewer)
{
  PC_BJacobi           *jac = (PC_BJacobi*)pc->data;
  PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
  PetscErrorCode       ierr;
  PetscMPIInt          rank;
  PetscInt             i;
  PetscBool            iascii,isstring;
  PetscViewer          sviewer;

  PetscFunctionBegin;
  ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    if (jac->use_true_local) {
      ierr = PetscViewerASCIIPrintf(viewer,"  block Jacobi: using true local matrix, number of blocks = %D\n",jac->n);CHKERRQ(ierr);
    }
    ierr = PetscViewerASCIIPrintf(viewer,"  block Jacobi: number of blocks = %D\n",jac->n);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
    if (jac->same_local_solves) {
      ierr = PetscViewerASCIIPrintf(viewer,"  Local solve is same for all blocks, in the following KSP and PC objects:\n");CHKERRQ(ierr);
      if (jac->ksp) {
        ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
        if (!rank) {
          ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
          ierr = KSPView(jac->ksp[0],sviewer);CHKERRQ(ierr);
          ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
        }
        ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
      } else if (jac->psubcomm && !jac->psubcomm->color) {
        ierr = PetscViewerASCIIGetStdout(mpjac->psubcomm->comm,&sviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
        ierr = KSPView(mpjac->ksp,sviewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      }
    } else {
      PetscInt n_global;
      ierr = MPI_Allreduce(&jac->n_local,&n_global,1,MPIU_INT,MPI_MAX,((PetscObject)pc)->comm);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"  Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] number of local blocks = %D, first local block number = %D\n",
                                                rank,jac->n_local,jac->first_local);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
      for (i=0; i<n_global; i++) {
        ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
        if (i < jac->n_local) {
          ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] local block number %D\n",rank,i);CHKERRQ(ierr);
          ierr = KSPView(jac->ksp[i],sviewer);CHKERRQ(ierr);
          ierr = PetscViewerASCIISynchronizedPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr);
        }
        ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
      }
      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
    }
  } else if (isstring) {
    ierr = PetscViewerStringSPrintf(viewer," blks=%D",jac->n);CHKERRQ(ierr);
    ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
    if (jac->ksp) {ierr = KSPView(jac->ksp[0],sviewer);CHKERRQ(ierr);}
    ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
  } else {
    SETERRQ1(((PetscObject)pc)->comm,PETSC_ERR_SUP,"Viewer type %s not supported for block Jacobi",((PetscObject)viewer)->type_name);
  }
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------------------*/

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCBJacobiSetUseTrueLocal_BJacobi"
PetscErrorCode PCBJacobiSetUseTrueLocal_BJacobi(PC pc)
{
  PC_BJacobi *jac;

  PetscFunctionBegin;
  jac                 = (PC_BJacobi*)pc->data;
  jac->use_true_local = PETSC_TRUE;
  PetscFunctionReturn(0);
}
EXTERN_C_END

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCBJacobiGetSubKSP_BJacobi"
PetscErrorCode PCBJacobiGetSubKSP_BJacobi(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
{
  PC_BJacobi *jac = (PC_BJacobi*)pc->data;

  PetscFunctionBegin;
  if (!pc->setupcalled) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Must call KSPSetUp() or PCSetUp() first");

  if (n_local) *n_local = jac->n_local;
  if (first_local) *first_local = jac->first_local;
  *ksp = jac->ksp;
  jac->same_local_solves = PETSC_FALSE; /* Assume that local solves are now different;
                                           not necessarily true, though! This flag is
                                           used only for PCView_BJacobi() */
  PetscFunctionReturn(0);
}
EXTERN_C_END

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCBJacobiSetTotalBlocks_BJacobi"
PetscErrorCode PCBJacobiSetTotalBlocks_BJacobi(PC pc,PetscInt blocks,PetscInt *lens)
{
  PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (pc->setupcalled > 0 && jac->n != blocks) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ORDER,"Cannot alter number of blocks after PCSetUp()/KSPSetUp() has been called");
  jac->n = blocks;
  if (!lens) {
    jac->g_lens = 0;
  } else {
    ierr = PetscMalloc(blocks*sizeof(PetscInt),&jac->g_lens);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory(pc,blocks*sizeof(PetscInt));CHKERRQ(ierr);
    ierr = PetscMemcpy(jac->g_lens,lens,blocks*sizeof(PetscInt));CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
EXTERN_C_END

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCBJacobiGetTotalBlocks_BJacobi"
PetscErrorCode PCBJacobiGetTotalBlocks_BJacobi(PC pc,PetscInt *blocks,const PetscInt *lens[])
{
  PC_BJacobi *jac = (PC_BJacobi*)pc->data;

  PetscFunctionBegin;
  *blocks = jac->n;
  if (lens) *lens = jac->g_lens;
  PetscFunctionReturn(0);
}
EXTERN_C_END

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCBJacobiSetLocalBlocks_BJacobi"
PetscErrorCode PCBJacobiSetLocalBlocks_BJacobi(PC pc,PetscInt blocks,const PetscInt lens[])
{
  PC_BJacobi     *jac;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  jac = (PC_BJacobi*)pc->data;

  jac->n_local = blocks;
  if (!lens) {
    jac->l_lens = 0;
  } else {
    ierr = PetscMalloc(blocks*sizeof(PetscInt),&jac->l_lens);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory(pc,blocks*sizeof(PetscInt));CHKERRQ(ierr);
    ierr = PetscMemcpy(jac->l_lens,lens,blocks*sizeof(PetscInt));CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
EXTERN_C_END

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCBJacobiGetLocalBlocks_BJacobi"
PetscErrorCode PCBJacobiGetLocalBlocks_BJacobi(PC pc,PetscInt *blocks,const PetscInt *lens[])
{
  PC_BJacobi *jac = (PC_BJacobi*)pc->data;

  PetscFunctionBegin;
  *blocks = jac->n_local;
  if (lens) *lens = jac->l_lens;
  PetscFunctionReturn(0);
}
EXTERN_C_END

/* -------------------------------------------------------------------------------------*/

#undef __FUNCT__
#define __FUNCT__ "PCBJacobiSetUseTrueLocal"
/*@
   PCBJacobiSetUseTrueLocal - Sets a flag to indicate that the block
   problem is associated with the linear system matrix instead of the
   default (where it is associated with the preconditioning matrix).
   That is, if the local system is solved iteratively then it iterates
   on the block from the matrix using the block from the preconditioner
   as the preconditioner for the local block.

   Logically Collective on PC

   Input Parameter:
.  pc - the preconditioner context

   Options Database Key:
.  -pc_bjacobi_truelocal - Activates PCBJacobiSetUseTrueLocal()

   Notes:
   For the common case in which the preconditioning and linear
   system matrices are identical, this routine is unnecessary.

   Level: intermediate

.keywords:  block, Jacobi, set, true, local, flag

.seealso: PCSetOperators(), PCBJacobiSetLocalBlocks()
@*/
PetscErrorCode PCBJacobiSetUseTrueLocal(PC pc)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCBJacobiSetUseTrueLocal_C",(PC),(pc));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
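
/*
   Usage sketch for PCBJacobiSetUseTrueLocal() (an illustration; `pc` is
   assumed to already be a PCBJACOBI preconditioner):

     ierr = PCBJacobiSetUseTrueLocal(pc);CHKERRQ(ierr);

   or, equivalently, from the options database:

     -pc_bjacobi_truelocal
*/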

#undef __FUNCT__
#define __FUNCT__ "PCBJacobiGetSubKSP"
/*@C
   PCBJacobiGetSubKSP - Gets the local KSP contexts for all blocks on
   this processor.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  n_local - the number of blocks on this processor, or PETSC_NULL
.  first_local - the global number of the first block on this processor, or PETSC_NULL
-  ksp - the array of KSP contexts

   Notes:
   After PCBJacobiGetSubKSP() the array of KSP contexts is not to be freed.

   Currently for some matrix implementations only 1 block per processor
   is supported.

   You must call KSPSetUp() or PCSetUp() before calling PCBJacobiGetSubKSP().

   Level: advanced

.keywords:  block, Jacobi, get, sub, KSP, context

.seealso: PCASMGetSubKSP()
@*/
PetscErrorCode PCBJacobiGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscUseMethod(pc,"PCBJacobiGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP**),(pc,n_local,first_local,ksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
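
/*
   Usage sketch for PCBJacobiGetSubKSP() (an illustration; the local
   variables are hypothetical and KSPSetUp() must already have been
   called on the outer solver):

     KSP      *subksp;
     PetscInt i,nlocal,first;
     ierr = PCBJacobiGetSubKSP(pc,&nlocal,&first,&subksp);CHKERRQ(ierr);
     for (i=0; i<nlocal; i++) {
       ierr = KSPSetTolerances(subksp[i],1.e-6,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT);CHKERRQ(ierr);
     }
*/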

#undef __FUNCT__
#define __FUNCT__ "PCBJacobiSetTotalBlocks"
/*@
   PCBJacobiSetTotalBlocks - Sets the global number of blocks for the block
   Jacobi preconditioner.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  blocks - the number of blocks
-  lens - [optional] integer array containing the size of each block

   Options Database Key:
.  -pc_bjacobi_blocks <blocks> - Sets the number of global blocks

   Notes:
   Currently only a limited number of blocking configurations are supported.
   All processors sharing the PC must call this routine with the same data.

   Level: intermediate

.keywords:  set, number, Jacobi, global, total, blocks

.seealso: PCBJacobiSetUseTrueLocal(), PCBJacobiSetLocalBlocks()
@*/
PetscErrorCode PCBJacobiSetTotalBlocks(PC pc,PetscInt blocks,const PetscInt lens[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (blocks <= 0) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have positive blocks");
  ierr = PetscTryMethod(pc,"PCBJacobiSetTotalBlocks_C",(PC,PetscInt,const PetscInt[]),(pc,blocks,lens));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
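
/*
   Usage sketch for PCBJacobiSetTotalBlocks() (an illustration with
   hypothetical sizes): split a 100-row system into 4 blocks, either with
   the default equal block sizes

     ierr = PCBJacobiSetTotalBlocks(pc,4,PETSC_NULL);CHKERRQ(ierr);

   or with explicit block sizes that must sum to the global size

     PetscInt lens[4] = {40,20,20,20};
     ierr = PCBJacobiSetTotalBlocks(pc,4,lens);CHKERRQ(ierr);
*/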

#undef __FUNCT__
#define __FUNCT__ "PCBJacobiGetTotalBlocks"
/*@C
   PCBJacobiGetTotalBlocks - Gets the global number of blocks for the block
   Jacobi preconditioner.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  blocks - the number of blocks
-  lens - integer array containing the size of each block

   Level: intermediate

.keywords:  get, number, Jacobi, global, total, blocks

.seealso: PCBJacobiSetUseTrueLocal(), PCBJacobiGetLocalBlocks()
@*/
PetscErrorCode PCBJacobiGetTotalBlocks(PC pc,PetscInt *blocks,const PetscInt *lens[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidIntPointer(blocks,2);
  ierr = PetscUseMethod(pc,"PCBJacobiGetTotalBlocks_C",(PC,PetscInt*,const PetscInt*[]),(pc,blocks,lens));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCBJacobiSetLocalBlocks"
/*@
   PCBJacobiSetLocalBlocks - Sets the local number of blocks for the block
   Jacobi preconditioner.

   Not Collective

   Input Parameters:
+  pc - the preconditioner context
.  blocks - the number of blocks
-  lens - [optional] integer array containing size of each block

   Note:
   Currently only a limited number of blocking configurations are supported.

   Level: intermediate

.keywords: PC, set, number, Jacobi, local, blocks

.seealso: PCBJacobiSetUseTrueLocal(), PCBJacobiSetTotalBlocks()
@*/
PetscErrorCode PCBJacobiSetLocalBlocks(PC pc,PetscInt blocks,const PetscInt lens[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (blocks < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must have nonnegative blocks");
  ierr = PetscTryMethod(pc,"PCBJacobiSetLocalBlocks_C",(PC,PetscInt,const PetscInt[]),(pc,blocks,lens));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCBJacobiGetLocalBlocks"
/*@C
   PCBJacobiGetLocalBlocks - Gets the local number of blocks for the block
   Jacobi preconditioner.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  blocks - the number of blocks
-  lens - [optional] integer array containing size of each block

   Note:
   Currently only a limited number of blocking configurations are supported.

   Level: intermediate

.keywords: PC, get, number, Jacobi, local, blocks

.seealso: PCBJacobiSetUseTrueLocal(), PCBJacobiGetTotalBlocks()
@*/
PetscErrorCode PCBJacobiGetLocalBlocks(PC pc,PetscInt *blocks,const PetscInt *lens[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidIntPointer(blocks,2);
  ierr = PetscUseMethod(pc,"PCBJacobiGetLocalBlocks_C",(PC,PetscInt*,const PetscInt*[]),(pc,blocks,lens));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
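
/*
   Usage sketch for the local-blocks interface (an illustration; the
   per-rank sizes are hypothetical): give this process two blocks of 30
   and 20 rows, then read the layout back:

     PetscInt       lens[2] = {30,20},nblocks;
     const PetscInt *l;
     ierr = PCBJacobiSetLocalBlocks(pc,2,lens);CHKERRQ(ierr);
     ierr = PCBJacobiGetLocalBlocks(pc,&nblocks,&l);CHKERRQ(ierr);
*/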

/* -----------------------------------------------------------------------------------*/

/*MC
   PCBJACOBI - Use block Jacobi preconditioning, where each block is
   (approximately) solved with its own KSP object.

   Options Database Keys:
+  -pc_bjacobi_blocks <blocks> - Sets the number of global blocks, see PCBJacobiSetTotalBlocks()
-  -pc_bjacobi_truelocal - Activates PCBJacobiSetUseTrueLocal()

   Notes: Each processor can have one or more blocks, but a block cannot be shared by more
     than one processor. Defaults to one block per processor.

     To set options on the solvers for each block, append -sub_ to all the KSP and PC
        options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly

     To set the options on the solvers separately for each block, call PCBJacobiGetSubKSP()
        and set the options directly on the resulting KSP object (you can access its PC
        with KSPGetPC())

   Level: beginner

   Concepts: block Jacobi

.seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
           PCASM, PCBJacobiSetUseTrueLocal(), PCBJacobiGetSubKSP(), PCBJacobiSetTotalBlocks(),
           PCBJacobiSetLocalBlocks(), PCSetModifySubmatrices()
M*/
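
/*
   Command-line sketch (an illustration): run with 4 blocks, each solved by
   one application of ILU-preconditioned "preonly":

     -pc_type bjacobi -pc_bjacobi_blocks 4 -sub_ksp_type preonly -sub_pc_type ilu
*/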

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCCreate_BJacobi"
PetscErrorCode PCCreate_BJacobi(PC pc)
{
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  PC_BJacobi     *jac;

  PetscFunctionBegin;
  ierr = PetscNewLog(pc,PC_BJacobi,&jac);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr);
  pc->ops->apply           = 0;
  pc->ops->applytranspose  = 0;
  pc->ops->setup           = PCSetUp_BJacobi;
  pc->ops->destroy         = PCDestroy_BJacobi;
  pc->ops->setfromoptions  = PCSetFromOptions_BJacobi;
  pc->ops->view            = PCView_BJacobi;
  pc->ops->applyrichardson = 0;

  pc->data               = (void*)jac;
  jac->n                 = -1;
  jac->n_local           = -1;
  jac->first_local       = rank;
  jac->ksp               = 0;
  jac->use_true_local    = PETSC_FALSE;
  jac->same_local_solves = PETSC_TRUE;
  jac->g_lens            = 0;
  jac->l_lens            = 0;
  jac->tp_mat            = 0;
  jac->tp_pmat           = 0;
  jac->psubcomm          = 0;

  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCBJacobiSetUseTrueLocal_C",
                                           "PCBJacobiSetUseTrueLocal_BJacobi",
                                           PCBJacobiSetUseTrueLocal_BJacobi);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCBJacobiGetSubKSP_C","PCBJacobiGetSubKSP_BJacobi",
                                           PCBJacobiGetSubKSP_BJacobi);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCBJacobiSetTotalBlocks_C","PCBJacobiSetTotalBlocks_BJacobi",
                                           PCBJacobiSetTotalBlocks_BJacobi);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCBJacobiGetTotalBlocks_C","PCBJacobiGetTotalBlocks_BJacobi",
                                           PCBJacobiGetTotalBlocks_BJacobi);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCBJacobiSetLocalBlocks_C","PCBJacobiSetLocalBlocks_BJacobi",
                                           PCBJacobiSetLocalBlocks_BJacobi);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCBJacobiGetLocalBlocks_C","PCBJacobiGetLocalBlocks_BJacobi",
                                           PCBJacobiGetLocalBlocks_BJacobi);CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
EXTERN_C_END

/* --------------------------------------------------------------------------------------------*/
/*
        These are for a single block per processor; works for AIJ, BAIJ; Seq and MPI
*/
#undef __FUNCT__
#define __FUNCT__ "PCReset_BJacobi_Singleblock"
PetscErrorCode PCReset_BJacobi_Singleblock(PC pc)
{
  PC_BJacobi             *jac = (PC_BJacobi*)pc->data;
  PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  /*
     If the on-processor block had to be generated via a MatGetDiagonalBlock()
     that creates a copy, this frees the space
  */
  ierr = MatDestroy(&jac->tp_mat);CHKERRQ(ierr);
  ierr = MatDestroy(&jac->tp_pmat);CHKERRQ(ierr);

  ierr = KSPReset(jac->ksp[0]);CHKERRQ(ierr);
  ierr = VecDestroy(&bjac->x);CHKERRQ(ierr);
  ierr = VecDestroy(&bjac->y);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCDestroy_BJacobi_Singleblock"
PetscErrorCode PCDestroy_BJacobi_Singleblock(PC pc)
{
  PC_BJacobi             *jac = (PC_BJacobi*)pc->data;
  PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  ierr = PCReset_BJacobi_Singleblock(pc);CHKERRQ(ierr);
  ierr = KSPDestroy(&jac->ksp[0]);CHKERRQ(ierr);
  ierr = PetscFree(jac->ksp);CHKERRQ(ierr);
  ierr = PetscFree(jac->l_lens);CHKERRQ(ierr);
  ierr = PetscFree(jac->g_lens);CHKERRQ(ierr);
  ierr = PetscFree(bjac);CHKERRQ(ierr);
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetUpOnBlocks_BJacobi_Singleblock"
PetscErrorCode PCSetUpOnBlocks_BJacobi_Singleblock(PC pc)
{
  PetscErrorCode ierr;
  PC_BJacobi     *jac = (PC_BJacobi*)pc->data;

  PetscFunctionBegin;
  ierr = KSPSetUp(jac->ksp[0]);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApply_BJacobi_Singleblock"
PetscErrorCode PCApply_BJacobi_Singleblock(PC pc,Vec x,Vec y)
{
  PetscErrorCode         ierr;
  PC_BJacobi             *jac = (PC_BJacobi*)pc->data;
  PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
  PetscScalar            *x_array,*y_array;

  PetscFunctionBegin;
  /*
      The VecPlaceArray() calls avoid having to copy the x and y vectors
      into the bjac->x and bjac->y work vectors. The reason for the
      bjac->x vector is that we need a sequential vector for the
      sequential solve.
  */
  ierr = VecGetArray(x,&x_array);CHKERRQ(ierr);
  ierr = VecGetArray(y,&y_array);CHKERRQ(ierr);
  ierr = VecPlaceArray(bjac->x,x_array);CHKERRQ(ierr);
  ierr = VecPlaceArray(bjac->y,y_array);CHKERRQ(ierr);
  ierr = KSPSolve(jac->ksp[0],bjac->x,bjac->y);CHKERRQ(ierr);
  ierr = VecResetArray(bjac->x);CHKERRQ(ierr);
  ierr = VecResetArray(bjac->y);CHKERRQ(ierr);
  ierr = VecRestoreArray(x,&x_array);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&y_array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApplySymmetricLeft_BJacobi_Singleblock"
PetscErrorCode PCApplySymmetricLeft_BJacobi_Singleblock(PC pc,Vec x,Vec y)
{
  PetscErrorCode         ierr;
  PC_BJacobi             *jac = (PC_BJacobi*)pc->data;
  PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
  PetscScalar            *x_array,*y_array;
  PC                     subpc;

  PetscFunctionBegin;
  /*
      The VecPlaceArray() calls avoid having to copy the x and y vectors
      into the bjac->x and bjac->y work vectors. The reason for the
      bjac->x vector is that we need a sequential vector for the
      sequential solve.
  */
  ierr = VecGetArray(x,&x_array);CHKERRQ(ierr);
  ierr = VecGetArray(y,&y_array);CHKERRQ(ierr);
  ierr = VecPlaceArray(bjac->x,x_array);CHKERRQ(ierr);
  ierr = VecPlaceArray(bjac->y,y_array);CHKERRQ(ierr);

  /* apply the symmetric left portion of the inner PC operator */
  /* note this bypasses the inner KSP and its options completely */

  ierr = KSPGetPC(jac->ksp[0],&subpc);CHKERRQ(ierr);
  ierr = PCApplySymmetricLeft(subpc,bjac->x,bjac->y);CHKERRQ(ierr);
  ierr = VecResetArray(bjac->x);CHKERRQ(ierr);
  ierr = VecResetArray(bjac->y);CHKERRQ(ierr);

  ierr = VecRestoreArray(x,&x_array);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&y_array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApplySymmetricRight_BJacobi_Singleblock"
PetscErrorCode PCApplySymmetricRight_BJacobi_Singleblock(PC pc,Vec x,Vec y)
{
  PetscErrorCode         ierr;
  PC_BJacobi             *jac = (PC_BJacobi*)pc->data;
  PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
  PetscScalar            *x_array,*y_array;
  PC                     subpc;

  PetscFunctionBegin;
  /*
      The VecPlaceArray() calls avoid having to copy the x and y vectors
      into the bjac->x and bjac->y work vectors. The reason for the
      bjac->x vector is that we need a sequential vector for the
      sequential solve.
  */
  ierr = VecGetArray(x,&x_array);CHKERRQ(ierr);
  ierr = VecGetArray(y,&y_array);CHKERRQ(ierr);
  ierr = VecPlaceArray(bjac->x,x_array);CHKERRQ(ierr);
  ierr = VecPlaceArray(bjac->y,y_array);CHKERRQ(ierr);

  /* apply the symmetric right portion of the inner PC operator */
  /* note this bypasses the inner KSP and its options completely */

  ierr = KSPGetPC(jac->ksp[0],&subpc);CHKERRQ(ierr);
  ierr = PCApplySymmetricRight(subpc,bjac->x,bjac->y);CHKERRQ(ierr);

  /* reset the work vectors before restoring the array pointers */
  ierr = VecResetArray(bjac->x);CHKERRQ(ierr);
  ierr = VecResetArray(bjac->y);CHKERRQ(ierr);

  ierr = VecRestoreArray(x,&x_array);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&y_array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApplyTranspose_BJacobi_Singleblock"
PetscErrorCode PCApplyTranspose_BJacobi_Singleblock(PC pc,Vec x,Vec y)
{
  PetscErrorCode         ierr;
  PC_BJacobi             *jac = (PC_BJacobi*)pc->data;
  PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data;
  PetscScalar            *x_array,*y_array;

  PetscFunctionBegin;
  /*
      The VecPlaceArray() calls avoid having to copy the x and y vectors
      into the bjac->x and bjac->y work vectors. The reason for the
      bjac->x vector is that we need a sequential vector for the
      sequential solve.
  */
  ierr = VecGetArray(x,&x_array);CHKERRQ(ierr);
  ierr = VecGetArray(y,&y_array);CHKERRQ(ierr);
  ierr = VecPlaceArray(bjac->x,x_array);CHKERRQ(ierr);
  ierr = VecPlaceArray(bjac->y,y_array);CHKERRQ(ierr);
  ierr = KSPSolveTranspose(jac->ksp[0],bjac->x,bjac->y);CHKERRQ(ierr);
  ierr = VecResetArray(bjac->x);CHKERRQ(ierr);
  ierr = VecResetArray(bjac->y);CHKERRQ(ierr);
  ierr = VecRestoreArray(x,&x_array);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&y_array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetUp_BJacobi_Singleblock"
static PetscErrorCode PCSetUp_BJacobi_Singleblock(PC pc,Mat mat,Mat pmat)
{
  PC_BJacobi             *jac = (PC_BJacobi*)pc->data;
  PetscErrorCode         ierr;
  PetscInt               m;
  KSP                    ksp;
  Vec                    x,y;
  PC_BJacobi_Singleblock *bjac;
  PetscBool              wasSetup;

  PetscFunctionBegin;
  /* set default direct solver with no Krylov method */
  if (!pc->setupcalled) {
    const char *prefix;
    wasSetup = PETSC_FALSE;
    ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
    ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(pc,ksp);CHKERRQ(ierr);
    ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
    ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);
    /*
      The reason we need to generate these vectors is to serve
      as the right-hand side and solution vector for the solve on the
      block. We do not need to allocate space for the vectors since
      that is provided via VecPlaceArray() just before the call to
      KSPSolve() on the block.
    */
    ierr = MatGetSize(pmat,&m,&m);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,m,PETSC_NULL,&x);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,m,PETSC_NULL,&y);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(pc,x);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(pc,y);CHKERRQ(ierr);

    pc->ops->reset               = PCReset_BJacobi_Singleblock;
    pc->ops->destroy             = PCDestroy_BJacobi_Singleblock;
    pc->ops->apply               = PCApply_BJacobi_Singleblock;
    pc->ops->applysymmetricleft  = PCApplySymmetricLeft_BJacobi_Singleblock;
    pc->ops->applysymmetricright = PCApplySymmetricRight_BJacobi_Singleblock;
    pc->ops->applytranspose      = PCApplyTranspose_BJacobi_Singleblock;
    pc->ops->setuponblocks       = PCSetUpOnBlocks_BJacobi_Singleblock;

    ierr = PetscNewLog(pc,PC_BJacobi_Singleblock,&bjac);CHKERRQ(ierr);
    bjac->x = x;
    bjac->y = y;

    ierr = PetscMalloc(sizeof(KSP),&jac->ksp);CHKERRQ(ierr);
    jac->ksp[0] = ksp;
    jac->data   = (void*)bjac;
  } else {
    wasSetup = PETSC_TRUE;
    ksp      = jac->ksp[0];
    bjac     = (PC_BJacobi_Singleblock*)jac->data;
  }
  if (jac->use_true_local) {
    ierr = KSPSetOperators(ksp,mat,pmat,pc->flag);CHKERRQ(ierr);
  } else {
    ierr = KSPSetOperators(ksp,pmat,pmat,pc->flag);CHKERRQ(ierr);
  }
  if (!wasSetup && pc->setfromoptionscalled) {
    ierr = KSPSetFromOptions(ksp);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
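
/*
   Prefix sketch (an illustration): KSPSetOptionsPrefix() followed by
   KSPAppendOptionsPrefix("sub_") above means the block solver inherits the
   outer PC's prefix. If the outer solver had been given the hypothetical
   prefix "outer_", the block solver would be controlled by, e.g.,

     -outer_sub_ksp_type preonly -outer_sub_pc_type ilu
*/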

/* ---------------------------------------------------------------------------------------------*/
#undef __FUNCT__
#define __FUNCT__ "PCReset_BJacobi_Multiblock"
PetscErrorCode PCReset_BJacobi_Multiblock(PC pc)
{
  PC_BJacobi            *jac = (PC_BJacobi*)pc->data;
  PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
  PetscErrorCode        ierr;
  PetscInt              i;

  PetscFunctionBegin;
  if (bjac && bjac->pmat) {
    ierr = MatDestroyMatrices(jac->n_local,&bjac->pmat);CHKERRQ(ierr);
    if (jac->use_true_local) {
      ierr = MatDestroyMatrices(jac->n_local,&bjac->mat);CHKERRQ(ierr);
    }
  }

  /*
     If the on-processor block had to be generated via a MatGetDiagonalBlock()
     that creates a copy, this frees the space
  */
  ierr = MatDestroy(&jac->tp_mat);CHKERRQ(ierr);
  ierr = MatDestroy(&jac->tp_pmat);CHKERRQ(ierr);

  for (i=0; i<jac->n_local; i++) {
    ierr = KSPReset(jac->ksp[i]);CHKERRQ(ierr);
    if (bjac && bjac->x) {
      ierr = VecDestroy(&bjac->x[i]);CHKERRQ(ierr);
      ierr = VecDestroy(&bjac->y[i]);CHKERRQ(ierr);
      ierr = ISDestroy(&bjac->is[i]);CHKERRQ(ierr);
    }
  }
  if (bjac) {
    ierr = PetscFree2(bjac->x,bjac->y);CHKERRQ(ierr);
    ierr = PetscFree(bjac->starts);CHKERRQ(ierr);
    ierr = PetscFree(bjac->is);CHKERRQ(ierr);
  }
  ierr = PetscFree(jac->data);CHKERRQ(ierr);
  ierr = PetscFree(jac->l_lens);CHKERRQ(ierr);
  ierr = PetscFree(jac->g_lens);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCDestroy_BJacobi_Multiblock"
PetscErrorCode PCDestroy_BJacobi_Multiblock(PC pc)
{
  PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  ierr = PCReset_BJacobi_Multiblock(pc);CHKERRQ(ierr);
  for (i=0; i<jac->n_local; i++) {
    ierr = KSPDestroy(&jac->ksp[i]);CHKERRQ(ierr);
  }
  ierr = PetscFree(jac->ksp);CHKERRQ(ierr);
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetUpOnBlocks_BJacobi_Multiblock"
PetscErrorCode PCSetUpOnBlocks_BJacobi_Multiblock(PC pc)
{
  PC_BJacobi     *jac = (PC_BJacobi*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i,n_local = jac->n_local;

  PetscFunctionBegin;
  for (i=0; i<n_local; i++) {
    ierr = KSPSetUp(jac->ksp[i]);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*
      Preconditioner application for block Jacobi
*/
#undef __FUNCT__
#define __FUNCT__ "PCApply_BJacobi_Multiblock"
PetscErrorCode PCApply_BJacobi_Multiblock(PC pc,Vec x,Vec y)
{
  PC_BJacobi            *jac = (PC_BJacobi*)pc->data;
  PetscErrorCode        ierr;
  PetscInt              i,n_local = jac->n_local;
  PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
  PetscScalar           *xin,*yin;

  PetscFunctionBegin;
  ierr = VecGetArray(x,&xin);CHKERRQ(ierr);
  ierr = VecGetArray(y,&yin);CHKERRQ(ierr);
  for (i=0; i<n_local; i++) {
    /*
       To avoid copying the subvector from x into a workspace we instead
       make the workspace vector array point to the subpart of the array of
       the global vector.
    */
    ierr = VecPlaceArray(bjac->x[i],xin+bjac->starts[i]);CHKERRQ(ierr);
    ierr = VecPlaceArray(bjac->y[i],yin+bjac->starts[i]);CHKERRQ(ierr);

    ierr = PetscLogEventBegin(PC_ApplyOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr);
    ierr = KSPSolve(jac->ksp[i],bjac->x[i],bjac->y[i]);CHKERRQ(ierr);
    ierr = PetscLogEventEnd(PC_ApplyOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr);

    ierr = VecResetArray(bjac->x[i]);CHKERRQ(ierr);
    ierr = VecResetArray(bjac->y[i]);CHKERRQ(ierr);
  }
  ierr = VecRestoreArray(x,&xin);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&yin);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*
      Transpose preconditioner application for block Jacobi
*/
#undef __FUNCT__
#define __FUNCT__ "PCApplyTranspose_BJacobi_Multiblock"
PetscErrorCode PCApplyTranspose_BJacobi_Multiblock(PC pc,Vec x,Vec y)
{
  PC_BJacobi            *jac = (PC_BJacobi*)pc->data;
  PetscErrorCode        ierr;
  PetscInt              i,n_local = jac->n_local;
  PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
  PetscScalar           *xin,*yin;

  PetscFunctionBegin;
  ierr = VecGetArray(x,&xin);CHKERRQ(ierr);
  ierr = VecGetArray(y,&yin);CHKERRQ(ierr);
  for (i=0; i<n_local; i++) {
    /*
       To avoid copying the subvector from x into a workspace we instead
       make the workspace vector array point to the subpart of the array of
       the global vector.
    */
    ierr = VecPlaceArray(bjac->x[i],xin+bjac->starts[i]);CHKERRQ(ierr);
    ierr = VecPlaceArray(bjac->y[i],yin+bjac->starts[i]);CHKERRQ(ierr);

    ierr = PetscLogEventBegin(PC_ApplyTransposeOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr);
    ierr = KSPSolveTranspose(jac->ksp[i],bjac->x[i],bjac->y[i]);CHKERRQ(ierr);
    ierr = PetscLogEventEnd(PC_ApplyTransposeOnBlocks,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr);

    /* reset the work vectors, matching PCApply_BJacobi_Multiblock() */
    ierr = VecResetArray(bjac->x[i]);CHKERRQ(ierr);
    ierr = VecResetArray(bjac->y[i]);CHKERRQ(ierr);
  }
  ierr = VecRestoreArray(x,&xin);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&yin);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
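
/*
   Offset illustration: bjac->starts[] holds the first local row of each
   block, accumulated from jac->l_lens[] in PCSetUp_BJacobi_Multiblock()
   below. With hypothetical sizes l_lens = {4,3,3}, starts = {0,4,7}, so
   block i operates on the array slice
   xin+starts[i] .. xin+starts[i]+l_lens[i]-1.
*/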

#undef __FUNCT__
#define __FUNCT__ "PCSetUp_BJacobi_Multiblock"
static PetscErrorCode PCSetUp_BJacobi_Multiblock(PC pc,Mat mat,Mat pmat)
{
  PC_BJacobi            *jac = (PC_BJacobi*)pc->data;
  PetscErrorCode        ierr;
  PetscInt              m,n_local,N,M,start,i;
  const char            *prefix,*pprefix,*mprefix;
  KSP                   ksp;
  Vec                   x,y;
  PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data;
  PC                    subpc;
  IS                    is;
  MatReuse              scall = MAT_REUSE_MATRIX;

  PetscFunctionBegin;
  ierr = MatGetLocalSize(pc->pmat,&M,&N);CHKERRQ(ierr);

  n_local = jac->n_local;

  if (jac->use_true_local) {
    PetscBool same;
    ierr = PetscTypeCompare((PetscObject)mat,((PetscObject)pmat)->type_name,&same);CHKERRQ(ierr);
    if (!same) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_INCOMP,"Matrices not of same type");
  }

  if (!pc->setupcalled) {
    scall = MAT_INITIAL_MATRIX;
    pc->ops->reset          = PCReset_BJacobi_Multiblock;
    pc->ops->destroy        = PCDestroy_BJacobi_Multiblock;
    pc->ops->apply          = PCApply_BJacobi_Multiblock;
    pc->ops->applytranspose = PCApplyTranspose_BJacobi_Multiblock;
    pc->ops->setuponblocks  = PCSetUpOnBlocks_BJacobi_Multiblock;

    ierr = PetscNewLog(pc,PC_BJacobi_Multiblock,&bjac);CHKERRQ(ierr);
    ierr = PetscMalloc(n_local*sizeof(KSP),&jac->ksp);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory(pc,n_local*sizeof(KSP));CHKERRQ(ierr);
    ierr = PetscMalloc2(n_local,Vec,&bjac->x,n_local,Vec,&bjac->y);CHKERRQ(ierr);
    ierr = PetscMalloc(n_local*sizeof(PetscInt),&bjac->starts);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory(pc,n_local*sizeof(PetscInt));CHKERRQ(ierr);

    jac->data = (void*)bjac;
    ierr = PetscMalloc(n_local*sizeof(IS),&bjac->is);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory(pc,n_local*sizeof(IS));CHKERRQ(ierr);

    start = 0;
    for (i=0; i<n_local; i++) {
      ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr);
      ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr);
      ierr = PetscLogObjectParent(pc,ksp);CHKERRQ(ierr);
      ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);
      ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr);
      ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
      ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr);
      ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr);

      m = jac->l_lens[i];

      /*
        The reason we need to generate these vectors is to serve
        as the right-hand side and solution vector for the solve on the
        block. We do not need to allocate space for the vectors since
        that is provided via VecPlaceArray() just before the call to
        KSPSolve() on the block.
      */
      ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,m,PETSC_NULL,&x);CHKERRQ(ierr);
      ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,m,PETSC_NULL,&y);CHKERRQ(ierr);
      ierr = PetscLogObjectParent(pc,x);CHKERRQ(ierr);
      ierr = PetscLogObjectParent(pc,y);CHKERRQ(ierr);
      bjac->x[i]      = x;
      bjac->y[i]      = y;
      bjac->starts[i] = start;
      jac->ksp[i]     = ksp;

      ierr = ISCreateStride(PETSC_COMM_SELF,m,start,1,&is);CHKERRQ(ierr);
      bjac->is[i] = is;
      ierr = PetscLogObjectParent(pc,is);CHKERRQ(ierr);

      start += m;
    }
  } else {
    bjac = (PC_BJacobi_Multiblock*)jac->data;
    /*
       Destroy the blocks from the previous iteration
    */
    if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
      ierr = MatDestroyMatrices(n_local,&bjac->pmat);CHKERRQ(ierr);
      if (jac->use_true_local) {
        ierr = MatDestroyMatrices(n_local,&bjac->mat);CHKERRQ(ierr);
      }
      scall = MAT_INITIAL_MATRIX;
    }
  }

  ierr = MatGetSubMatrices(pmat,n_local,bjac->is,bjac->is,scall,&bjac->pmat);CHKERRQ(ierr);
  if (jac->use_true_local) {
    ierr = PetscObjectGetOptionsPrefix((PetscObject)mat,&mprefix);CHKERRQ(ierr);
    ierr = MatGetSubMatrices(mat,n_local,bjac->is,bjac->is,scall,&bjac->mat);CHKERRQ(ierr);
  }
  /* Return control to the user so that the submatrices can be modified (e.g., to apply
     different boundary conditions for the submatrices than for the global problem) */
  ierr = PCModifySubMatrices(pc,n_local,bjac->is,bjac->is,bjac->pmat,pc->modifysubmatricesP);CHKERRQ(ierr);

  ierr = PetscObjectGetOptionsPrefix((PetscObject)pmat,&pprefix);CHKERRQ(ierr);
  for (i=0; i<n_local; i++) {
    ierr = PetscLogObjectParent(pc,bjac->pmat[i]);CHKERRQ(ierr);
    ierr = PetscObjectSetOptionsPrefix((PetscObject)bjac->pmat[i],pprefix);CHKERRQ(ierr);
    if (jac->use_true_local) {
      ierr = PetscLogObjectParent(pc,bjac->mat[i]);CHKERRQ(ierr);
      ierr = PetscObjectSetOptionsPrefix((PetscObject)bjac->mat[i],mprefix);CHKERRQ(ierr);
      ierr = KSPSetOperators(jac->ksp[i],bjac->mat[i],bjac->pmat[i],pc->flag);CHKERRQ(ierr);
    } else {
      ierr = KSPSetOperators(jac->ksp[i],bjac->pmat[i],bjac->pmat[i],pc->flag);CHKERRQ(ierr);
    }
    if (pc->setfromoptionscalled) {
      ierr = KSPSetFromOptions(jac->ksp[i]);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}

/* ---------------------------------------------------------------------------------------------*/
/*
      These are for a single block spanning multiple processes
*/
#undef __FUNCT__
#define __FUNCT__ "PCReset_BJacobi_Multiproc"
static PetscErrorCode PCReset_BJacobi_Multiproc(PC pc)
{
  PC_BJacobi           *jac = (PC_BJacobi*)pc->data;
  PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
  PetscErrorCode       ierr;

  PetscFunctionBegin;
  ierr = VecDestroy(&mpjac->ysub);CHKERRQ(ierr);
  ierr = VecDestroy(&mpjac->xsub);CHKERRQ(ierr);
  ierr = MatDestroy(&mpjac->submats);CHKERRQ(ierr);
  if (mpjac->ksp) {ierr = KSPReset(mpjac->ksp);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCDestroy_BJacobi_Multiproc"
static PetscErrorCode PCDestroy_BJacobi_Multiproc(PC pc)
{
  PC_BJacobi           *jac = (PC_BJacobi*)pc->data;
  PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
  PetscErrorCode       ierr;

  PetscFunctionBegin;
  ierr = PCReset_BJacobi_Multiproc(pc);CHKERRQ(ierr);
  ierr = KSPDestroy(&mpjac->ksp);CHKERRQ(ierr);
  ierr = PetscSubcommDestroy(&mpjac->psubcomm);CHKERRQ(ierr);

  ierr = PetscFree(mpjac);CHKERRQ(ierr);
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCApply_BJacobi_Multiproc"
static PetscErrorCode PCApply_BJacobi_Multiproc(PC pc,Vec x,Vec y)
{
  PC_BJacobi           *jac = (PC_BJacobi*)pc->data;
  PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
  PetscErrorCode       ierr;
  PetscScalar          *xarray,*yarray;

  PetscFunctionBegin;
  /* place x's and y's local arrays into xsub and ysub */
  ierr = VecGetArray(x,&xarray);CHKERRQ(ierr);
  ierr = VecGetArray(y,&yarray);CHKERRQ(ierr);
  ierr = VecPlaceArray(mpjac->xsub,xarray);CHKERRQ(ierr);
  ierr = VecPlaceArray(mpjac->ysub,yarray);CHKERRQ(ierr);

  /* apply preconditioner on each matrix block */
  ierr = KSPSolve(mpjac->ksp,mpjac->xsub,mpjac->ysub);CHKERRQ(ierr);

  ierr = VecResetArray(mpjac->xsub);CHKERRQ(ierr);
  ierr = VecResetArray(mpjac->ysub);CHKERRQ(ierr);
  ierr = VecRestoreArray(x,&xarray);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&yarray);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

extern PetscErrorCode MatGetMultiProcBlock_MPIAIJ(Mat,MPI_Comm,Mat*);
#undef __FUNCT__
#define __FUNCT__ "PCSetUp_BJacobi_Multiproc"
static PetscErrorCode PCSetUp_BJacobi_Multiproc(PC pc)
{
  PC_BJacobi           *jac = (PC_BJacobi*)pc->data;
  PC_BJacobi_Multiproc *mpjac = (PC_BJacobi_Multiproc*)jac->data;
  PetscErrorCode       ierr;
  PetscInt             m,n;
  MPI_Comm             comm = ((PetscObject)pc)->comm,subcomm=0;
  const char           *prefix;

  PetscFunctionBegin;
  if (jac->n_local > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Only a single block in a subcommunicator is supported");
  jac->n_local = 1; /* currently only a single block is supported for a subcommunicator */
  if (!pc->setupcalled) {
    ierr = PetscNewLog(pc,PC_BJacobi_Multiproc,&mpjac);CHKERRQ(ierr);
    jac->data = (void*)mpjac;

    /* initialize datastructure mpjac */
    if (!jac->psubcomm) {
      /* Create default contiguous subcommunicators if the user does not provide them */
      ierr = PetscSubcommCreate(comm,&jac->psubcomm);CHKERRQ(ierr);
      ierr = PetscSubcommSetNumber(jac->psubcomm,jac->n);CHKERRQ(ierr);
      ierr = PetscSubcommSetType(jac->psubcomm,PETSC_SUBCOMM_CONTIGUOUS);CHKERRQ(ierr);
      ierr = PetscLogObjectMemory(pc,sizeof(PetscSubcomm));CHKERRQ(ierr);
    }
    mpjac->psubcomm = jac->psubcomm;
    subcomm         = mpjac->psubcomm->comm;

    /* Get matrix blocks of pmat */
    ierr = MatGetMultiProcBlock_MPIAIJ(pc->pmat,subcomm,&mpjac->submats);CHKERRQ(ierr);

    /* create a new KSP that the processors in each subcomm share a copy of */
    ierr = KSPCreate(subcomm,&mpjac->ksp);CHKERRQ(ierr);
    ierr = PetscObjectIncrementTabLevel((PetscObject)mpjac->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(pc,mpjac->ksp);CHKERRQ(ierr);
    ierr = KSPSetOperators(mpjac->ksp,mpjac->submats,mpjac->submats,pc->flag);CHKERRQ(ierr);
    ierr = KSPGetPC(mpjac->ksp,&mpjac->pc);CHKERRQ(ierr);

    ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
    ierr = KSPSetOptionsPrefix(mpjac->ksp,prefix);CHKERRQ(ierr);
    ierr = KSPAppendOptionsPrefix(mpjac->ksp,"sub_");CHKERRQ(ierr);
    /*
      PetscMPIInt rank,subsize,subrank;
      ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
      ierr = MPI_Comm_size(subcomm,&subsize);CHKERRQ(ierr);
      ierr = MPI_Comm_rank(subcomm,&subrank);CHKERRQ(ierr);

      ierr = MatGetLocalSize(mpjac->submats,&m,PETSC_NULL);CHKERRQ(ierr);
      ierr = MatGetSize(mpjac->submats,&n,PETSC_NULL);CHKERRQ(ierr);
      ierr = PetscSynchronizedPrintf(comm,"[%d], sub-size %d,sub-rank %d\n",rank,subsize,subrank);
      ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
    */

    /* create dummy vectors xsub and ysub */
    ierr = MatGetLocalSize(mpjac->submats,&m,&n);CHKERRQ(ierr);
    ierr = VecCreateMPIWithArray(subcomm,n,PETSC_DECIDE,PETSC_NULL,&mpjac->xsub);CHKERRQ(ierr);
    ierr = VecCreateMPIWithArray(subcomm,m,PETSC_DECIDE,PETSC_NULL,&mpjac->ysub);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(pc,mpjac->xsub);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(pc,mpjac->ysub);CHKERRQ(ierr);

    pc->ops->reset   = PCReset_BJacobi_Multiproc;
    pc->ops->destroy = PCDestroy_BJacobi_Multiproc;
    pc->ops->apply   = PCApply_BJacobi_Multiproc;
  }

  if (pc->setupcalled && pc->flag == DIFFERENT_NONZERO_PATTERN) {
    /* destroy old matrix blocks, then get new matrix blocks */
    if (mpjac->submats) {
      ierr = MatDestroy(&mpjac->submats);CHKERRQ(ierr);
      subcomm = mpjac->psubcomm->comm;
      ierr = MatGetMultiProcBlock_MPIAIJ(pc->pmat,subcomm,&mpjac->submats);CHKERRQ(ierr);
      ierr = KSPSetOperators(mpjac->ksp,mpjac->submats,mpjac->submats,DIFFERENT_NONZERO_PATTERN);CHKERRQ(ierr);
    }
  }

  if (pc->setfromoptionscalled) {
    ierr = KSPSetFromOptions(mpjac->ksp);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
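
/*
   Multiproc sketch (an illustration): requesting fewer blocks than
   processes, e.g.

     mpiexec -n 4 ./app -pc_type bjacobi -pc_bjacobi_blocks 2

   takes the "jac->n > 0 && jac->n < size" path in PCSetUp_BJacobi() above,
   so each of the 2 blocks spans a 2-process subcommunicator and is solved
   by the subcommunicator-wide KSP created here.
*/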