1 /*$Id: bjacobi.c,v 1.160 2001/08/07 03:03:35 balay Exp $*/ 2 /* 3 Defines a block Jacobi preconditioner. 4 */ 5 #include "src/mat/matimpl.h" 6 #include "src/ksp/pc/pcimpl.h" /*I "petscpc.h" I*/ 7 #include "src/ksp/pc/impls/bjacobi/bjacobi.h" 8 9 static int PCSetUp_BJacobi_Singleblock(PC,Mat,Mat); 10 static int PCSetUp_BJacobi_Multiblock(PC,Mat,Mat); 11 12 #undef __FUNCT__ 13 #define __FUNCT__ "PCSetUp_BJacobi" 14 static int PCSetUp_BJacobi(PC pc) 15 { 16 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 17 Mat mat = pc->mat,pmat = pc->pmat; 18 int ierr,N,M,start,i,rank,size,sum,end; 19 int bs,i_start=-1,i_end=-1; 20 char *pprefix,*mprefix; 21 int (*f)(Mat,PetscTruth*,MatReuse,Mat*); 22 23 PetscFunctionBegin; 24 ierr = MPI_Comm_rank(pc->comm,&rank);CHKERRQ(ierr); 25 ierr = MPI_Comm_size(pc->comm,&size);CHKERRQ(ierr); 26 ierr = MatGetLocalSize(pc->pmat,&M,&N);CHKERRQ(ierr); 27 ierr = MatGetBlockSize(pc->pmat,&bs);CHKERRQ(ierr); 28 29 /* ---------- 30 Determines the number of blocks assigned to each processor 31 */ 32 33 /* local block count given */ 34 if (jac->n_local > 0 && jac->n < 0) { 35 ierr = MPI_Allreduce(&jac->n_local,&jac->n,1,MPI_INT,MPI_SUM,pc->comm);CHKERRQ(ierr); 36 if (jac->l_lens) { /* check that user set these correctly */ 37 sum = 0; 38 for (i=0; i<jac->n_local; i++) { 39 if (jac->l_lens[i]/bs*bs !=jac->l_lens[i]) { 40 SETERRQ(PETSC_ERR_ARG_SIZ,"Mat blocksize doesn't match block Jacobi layout"); 41 } 42 sum += jac->l_lens[i]; 43 } 44 if (sum != M) SETERRQ(PETSC_ERR_ARG_SIZ,"Local lens sent incorrectly"); 45 } else { 46 ierr = PetscMalloc(jac->n_local*sizeof(int),&jac->l_lens);CHKERRQ(ierr); 47 for (i=0; i<jac->n_local; i++) { 48 jac->l_lens[i] = bs*((M/bs)/jac->n_local + (((M/bs) % jac->n_local) > i)); 49 } 50 } 51 } else if (jac->n > 0 && jac->n_local < 0) { /* global block count given */ 52 /* global blocks given: determine which ones are local */ 53 if (jac->g_lens) { 54 /* check if the g_lens is has valid entries */ 55 for (i=0; i<jac->n; i++) { 56 if (!jac->g_lens[i]) SETERRQ(PETSC_ERR_ARG_SIZ,"Zero block not allowed"); 57 if (jac->g_lens[i]/bs*bs != jac->g_lens[i]) { 58 SETERRQ(PETSC_ERR_ARG_SIZ,"Mat blocksize doesn't match block Jacobi layout"); 59 } 60 } 61 if (size == 1) { 62 jac->n_local = jac->n; 63 ierr = PetscMalloc(jac->n_local*sizeof(int),&jac->l_lens);CHKERRQ(ierr); 64 ierr = PetscMemcpy(jac->l_lens,jac->g_lens,jac->n_local*sizeof(int));CHKERRQ(ierr); 65 /* check that user set these correctly */ 66 sum = 0; 67 for (i=0; i<jac->n_local; i++) sum += jac->l_lens[i]; 68 if (sum != M) SETERRQ(PETSC_ERR_ARG_SIZ,"Global lens sent incorrectly"); 69 } else { 70 ierr = MatGetOwnershipRange(pc->pmat,&start,&end);CHKERRQ(ierr); 71 /* loop over blocks determing first one owned by me */ 72 sum = 0; 73 for (i=0; i<jac->n+1; i++) { 74 if (sum == start) { i_start = i; goto start_1;} 75 if (i < jac->n) sum += jac->g_lens[i]; 76 } 77 SETERRQ(PETSC_ERR_ARG_SIZ,"Block sizes\n\ 78 used in PCBJacobiSetTotalBlocks()\n\ 79 are not compatible with parallel matrix layout"); 80 start_1: 81 for (i=i_start; i<jac->n+1; i++) { 82 if (sum == end) { i_end = i; goto end_1; } 83 if (i < jac->n) sum += jac->g_lens[i]; 84 } 85 SETERRQ(PETSC_ERR_ARG_SIZ,"Block sizes\n\ 86 used in PCBJacobiSetTotalBlocks()\n\ 87 are not compatible with parallel matrix layout"); 88 end_1: 89 jac->n_local = i_end - i_start; 90 ierr = PetscMalloc(jac->n_local*sizeof(int),&jac->l_lens);CHKERRQ(ierr); 91 ierr = PetscMemcpy(jac->l_lens,jac->g_lens+i_start,jac->n_local*sizeof(int));CHKERRQ(ierr); 92 } 93 } else { /* no global blocks given, determine then using default layout */ 94 jac->n_local = jac->n/size + ((jac->n % size) > rank); 95 ierr = PetscMalloc(jac->n_local*sizeof(int),&jac->l_lens);CHKERRQ(ierr); 96 for (i=0; i<jac->n_local; i++) { 97 jac->l_lens[i] = ((M/bs)/jac->n_local + (((M/bs) % jac->n_local) > i))*bs; 98 if (!jac->l_lens[i]) SETERRQ(PETSC_ERR_ARG_SIZ,"Too many blocks given"); 99 } 100 } 101 } else if (jac->n < 0 && jac->n_local < 0) { /* no blocks given */ 102 jac->n = size; 103 jac->n_local = 1; 104 ierr = PetscMalloc(sizeof(int),&jac->l_lens);CHKERRQ(ierr); 105 jac->l_lens[0] = M; 106 } 107 108 ierr = MPI_Comm_size(pc->comm,&size);CHKERRQ(ierr); 109 ierr = PetscObjectQueryFunction((PetscObject)pc->mat,"MatGetDiagonalBlock_C",(void (**)(void))&f);CHKERRQ(ierr); 110 if (size == 1 && !f) { 111 mat = pc->mat; 112 pmat = pc->pmat; 113 } else { 114 PetscTruth iscopy; 115 MatReuse scall; 116 117 if (jac->use_true_local) { 118 scall = MAT_INITIAL_MATRIX; 119 if (pc->setupcalled) { 120 if (pc->flag == SAME_NONZERO_PATTERN) { 121 if (jac->tp_mat) { 122 scall = MAT_REUSE_MATRIX; 123 mat = jac->tp_mat; 124 } 125 } else { 126 if (jac->tp_mat) { 127 ierr = MatDestroy(jac->tp_mat);CHKERRQ(ierr); 128 } 129 } 130 } 131 if (!f) { 132 SETERRQ(PETSC_ERR_SUP,"This matrix does not support getting diagonal block"); 133 } 134 ierr = (*f)(pc->mat,&iscopy,scall,&mat);CHKERRQ(ierr); 135 /* make submatrix have same prefix as entire matrix */ 136 ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->mat,&mprefix);CHKERRQ(ierr); 137 ierr = PetscObjectSetOptionsPrefix((PetscObject)mat,mprefix);CHKERRQ(ierr); 138 if (iscopy) { 139 jac->tp_mat = mat; 140 } 141 } 142 if (pc->pmat != pc->mat || !jac->use_true_local) { 143 scall = MAT_INITIAL_MATRIX; 144 if (pc->setupcalled) { 145 if (pc->flag == SAME_NONZERO_PATTERN) { 146 if (jac->tp_pmat) { 147 scall = MAT_REUSE_MATRIX; 148 pmat = jac->tp_pmat; 149 } 150 } else { 151 if (jac->tp_pmat) { 152 ierr = MatDestroy(jac->tp_pmat);CHKERRQ(ierr); 153 } 154 } 155 } 156 ierr = PetscObjectQueryFunction((PetscObject)pc->pmat,"MatGetDiagonalBlock_C",(void (**)(void))&f);CHKERRQ(ierr); 157 if (!f) { 158 SETERRQ(PETSC_ERR_SUP,"This matrix does not support getting diagonal block"); 159 } 160 ierr = (*f)(pc->pmat,&iscopy,scall,&pmat);CHKERRQ(ierr); 161 /* make submatrix have same prefix as entire matrix */ 162 ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr); 163 ierr = PetscObjectSetOptionsPrefix((PetscObject)pmat,pprefix);CHKERRQ(ierr); 164 if (iscopy) { 165 jac->tp_pmat = pmat; 166 } 167 } else { 168 pmat = mat; 169 } 170 } 171 172 /* ------ 173 Setup code depends on the number of blocks 174 */ 175 if (jac->n_local == 1) { 176 ierr = PCSetUp_BJacobi_Singleblock(pc,mat,pmat);CHKERRQ(ierr); 177 } else { 178 ierr = PCSetUp_BJacobi_Multiblock(pc,mat,pmat);CHKERRQ(ierr); 179 } 180 PetscFunctionReturn(0); 181 } 182 183 /* Default destroy, if it has never been setup */ 184 #undef __FUNCT__ 185 #define __FUNCT__ "PCDestroy_BJacobi" 186 static int PCDestroy_BJacobi(PC pc) 187 { 188 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 189 int ierr; 190 191 PetscFunctionBegin; 192 if (jac->g_lens) {ierr = PetscFree(jac->g_lens);CHKERRQ(ierr);} 193 if (jac->l_lens) {ierr = PetscFree(jac->l_lens);CHKERRQ(ierr);} 194 ierr = PetscFree(jac);CHKERRQ(ierr); 195 PetscFunctionReturn(0); 196 } 197 198 #undef __FUNCT__ 199 #define __FUNCT__ "PCSetFromOptions_BJacobi" 200 static int PCSetFromOptions_BJacobi(PC pc) 201 { 202 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 203 int blocks,ierr; 204 PetscTruth flg; 205 206 PetscFunctionBegin; 207 ierr = PetscOptionsHead("Block Jacobi options");CHKERRQ(ierr); 208 ierr = PetscOptionsInt("-pc_bjacobi_blocks","Total number of blocks","PCBJacobiSetTotalBlocks",jac->n,&blocks,&flg);CHKERRQ(ierr); 209 if (flg) { 210 ierr = PCBJacobiSetTotalBlocks(pc,blocks,PETSC_NULL);CHKERRQ(ierr); 211 } 212 ierr = PetscOptionsName("-pc_bjacobi_truelocal","Use the true matrix, not preconditioner matrix to define matrix vector product in sub-problems","PCBJacobiSetUseTrueLocal",&flg);CHKERRQ(ierr); 213 if (flg) { 214 ierr = PCBJacobiSetUseTrueLocal(pc);CHKERRQ(ierr); 215 } 216 ierr = PetscOptionsTail();CHKERRQ(ierr); 217 PetscFunctionReturn(0); 218 } 219 220 #undef __FUNCT__ 221 #define __FUNCT__ "PCView_BJacobi" 222 static int PCView_BJacobi(PC pc,PetscViewer viewer) 223 { 224 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 225 int rank,ierr,i; 226 PetscTruth isascii,isstring; 227 PetscViewer sviewer; 228 229 PetscFunctionBegin; 230 ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&isascii);CHKERRQ(ierr); 231 ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_STRING,&isstring);CHKERRQ(ierr); 232 if (isascii) { 233 if (jac->use_true_local) { 234 ierr = PetscViewerASCIIPrintf(viewer," block Jacobi: using true local matrix, number of blocks = %d\n",jac->n);CHKERRQ(ierr); 235 } 236 ierr = PetscViewerASCIIPrintf(viewer," block Jacobi: number of blocks = %d\n",jac->n);CHKERRQ(ierr); 237 ierr = MPI_Comm_rank(pc->comm,&rank);CHKERRQ(ierr); 238 if (jac->same_local_solves) { 239 ierr = PetscViewerASCIIPrintf(viewer," Local solve is same for all blocks, in the following KSP and PC objects:\n");CHKERRQ(ierr); 240 ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr); 241 if (!rank && jac->ksp) { 242 ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr); 243 ierr = KSPView(jac->ksp[0],sviewer);CHKERRQ(ierr); 244 ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr); 245 } 246 ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr); 247 } else { 248 249 ierr = PetscViewerASCIIPrintf(viewer," Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr); 250 ierr = PetscViewerASCIISynchronizedPrintf(viewer,"Proc %d: number of local blocks = %d, first local block number = %d\n", 251 rank,jac->n_local,jac->first_local);CHKERRQ(ierr); 252 ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr); 253 for (i=0; i<jac->n_local; i++) { 254 ierr = PetscViewerASCIISynchronizedPrintf(viewer,"Proc %d: local block number %d\n",rank,i);CHKERRQ(ierr); 255 ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr); 256 ierr = KSPView(jac->ksp[i],sviewer);CHKERRQ(ierr); 257 if (i != jac->n_local-1) { 258 ierr = PetscViewerASCIISynchronizedPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr); 259 } 260 ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr); 261 } 262 ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr); 263 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 264 } 265 } else if (isstring) { 266 ierr = PetscViewerStringSPrintf(viewer," blks=%d",jac->n);CHKERRQ(ierr); 267 ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr); 268 if (jac->ksp) {ierr = KSPView(jac->ksp[0],sviewer);CHKERRQ(ierr);} 269 ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr); 270 } else { 271 SETERRQ1(1,"Viewer type %s not supported for block Jacobi",((PetscObject)viewer)->type_name); 272 } 273 PetscFunctionReturn(0); 274 } 275 276 /* -------------------------------------------------------------------------------------*/ 277 278 EXTERN_C_BEGIN 279 #undef __FUNCT__ 280 #define __FUNCT__ "PCBJacobiSetUseTrueLocal_BJacobi" 281 int PCBJacobiSetUseTrueLocal_BJacobi(PC pc) 282 { 283 PC_BJacobi *jac; 284 285 PetscFunctionBegin; 286 jac = (PC_BJacobi*)pc->data; 287 jac->use_true_local = PETSC_TRUE; 288 PetscFunctionReturn(0); 289 } 290 EXTERN_C_END 291 292 EXTERN_C_BEGIN 293 #undef __FUNCT__ 294 #define __FUNCT__ "PCBJacobiGetSubKSP_BJacobi" 295 int PCBJacobiGetSubKSP_BJacobi(PC pc,int *n_local,int *first_local,KSP **ksp) 296 { 297 PC_BJacobi *jac = (PC_BJacobi*)pc->data;; 298 299 PetscFunctionBegin; 300 if (!pc->setupcalled) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Must call KSPSetUp() or PCSetUp() first"); 301 302 if (n_local) *n_local = jac->n_local; 303 if (first_local) *first_local = jac->first_local; 304 *ksp = jac->ksp; 305 jac->same_local_solves = PETSC_FALSE; /* Assume that local solves are now different; 306 not necessarily true though! This flag is 307 used only for PCView_BJacobi() */ 308 PetscFunctionReturn(0); 309 } 310 EXTERN_C_END 311 312 EXTERN_C_BEGIN 313 #undef __FUNCT__ 314 #define __FUNCT__ "PCBJacobiSetTotalBlocks_BJacobi" 315 int PCBJacobiSetTotalBlocks_BJacobi(PC pc,int blocks,int *lens) 316 { 317 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 318 int ierr; 319 320 PetscFunctionBegin; 321 322 if (pc->setupcalled > 0) SETERRQ(1,"Cannot set number of blocks after PCSetUp()/KSPSetUp() has been called"); 323 jac->n = blocks; 324 if (!lens) { 325 jac->g_lens = 0; 326 } else { 327 ierr = PetscMalloc(blocks*sizeof(int),&jac->g_lens);CHKERRQ(ierr); 328 PetscLogObjectMemory(pc,blocks*sizeof(int)); 329 ierr = PetscMemcpy(jac->g_lens,lens,blocks*sizeof(int));CHKERRQ(ierr); 330 } 331 PetscFunctionReturn(0); 332 } 333 EXTERN_C_END 334 335 EXTERN_C_BEGIN 336 #undef __FUNCT__ 337 #define __FUNCT__ "PCBJacobiGetTotalBlocks_BJacobi" 338 int PCBJacobiGetTotalBlocks_BJacobi(PC pc, int *blocks, const int *lens[]) 339 { 340 PC_BJacobi *jac = (PC_BJacobi*) pc->data; 341 342 PetscFunctionBegin; 343 *blocks = jac->n; 344 if (lens) *lens = jac->g_lens; 345 PetscFunctionReturn(0); 346 } 347 EXTERN_C_END 348 349 EXTERN_C_BEGIN 350 #undef __FUNCT__ 351 #define __FUNCT__ "PCBJacobiSetLocalBlocks_BJacobi" 352 int PCBJacobiSetLocalBlocks_BJacobi(PC pc,int blocks,const int lens[]) 353 { 354 PC_BJacobi *jac; 355 int ierr; 356 357 PetscFunctionBegin; 358 jac = (PC_BJacobi*)pc->data; 359 360 jac->n_local = blocks; 361 if (!lens) { 362 jac->l_lens = 0; 363 } else { 364 ierr = PetscMalloc(blocks*sizeof(int),&jac->l_lens);CHKERRQ(ierr); 365 PetscLogObjectMemory(pc,blocks*sizeof(int)); 366 ierr = PetscMemcpy(jac->l_lens,lens,blocks*sizeof(int));CHKERRQ(ierr); 367 } 368 PetscFunctionReturn(0); 369 } 370 EXTERN_C_END 371 372 EXTERN_C_BEGIN 373 #undef __FUNCT__ 374 #define __FUNCT__ "PCBJacobiGetLocalBlocks_BJacobi" 375 int PCBJacobiGetLocalBlocks_BJacobi(PC pc, int *blocks, const int *lens[]) 376 { 377 PC_BJacobi *jac = (PC_BJacobi*) pc->data; 378 379 PetscFunctionBegin; 380 *blocks = jac->n_local; 381 if (lens) *lens = jac->l_lens; 382 PetscFunctionReturn(0); 383 } 384 EXTERN_C_END 385 386 /* -------------------------------------------------------------------------------------*/ 387 388 #undef __FUNCT__ 389 #define __FUNCT__ "PCBJacobiSetUseTrueLocal" 390 /*@ 391 PCBJacobiSetUseTrueLocal - Sets a flag to indicate that the block 392 problem is associated with the linear system matrix instead of the 393 default (where it is associated with the preconditioning matrix). 394 That is, if the local system is solved iteratively then it iterates 395 on the block from the matrix using the block from the preconditioner 396 as the preconditioner for the local block. 397 398 Collective on PC 399 400 Input Parameters: 401 . pc - the preconditioner context 402 403 Options Database Key: 404 . -pc_bjacobi_truelocal - Activates PCBJacobiSetUseTrueLocal() 405 406 Notes: 407 For the common case in which the preconditioning and linear 408 system matrices are identical, this routine is unnecessary. 409 410 Level: intermediate 411 412 .keywords: block, Jacobi, set, true, local, flag 413 414 .seealso: PCSetOperators(), PCBJacobiSetLocalBlocks() 415 @*/ 416 int PCBJacobiSetUseTrueLocal(PC pc) 417 { 418 int ierr,(*f)(PC); 419 420 PetscFunctionBegin; 421 PetscValidHeaderSpecific(pc,PC_COOKIE,1); 422 ierr = PetscObjectQueryFunction((PetscObject)pc,"PCBJacobiSetUseTrueLocal_C",(void (**)(void))&f);CHKERRQ(ierr); 423 if (f) { 424 ierr = (*f)(pc);CHKERRQ(ierr); 425 } 426 427 PetscFunctionReturn(0); 428 } 429 430 #undef __FUNCT__ 431 #define __FUNCT__ "PCBJacobiGetSubKSP" 432 /*@C 433 PCBJacobiGetSubKSP - Gets the local KSP contexts for all blocks on 434 this processor. 435 436 Note Collective 437 438 Input Parameter: 439 . pc - the preconditioner context 440 441 Output Parameters: 442 + n_local - the number of blocks on this processor, or PETSC_NULL 443 . first_local - the global number of the first block on this processor, or PETSC_NULL 444 - ksp - the array of KSP contexts 445 446 Notes: 447 After PCBJacobiGetSubKSP() the array of KSP contexts is not to be freed. 448 449 Currently for some matrix implementations only 1 block per processor 450 is supported. 451 452 You must call KSPSetUp() or PCSetUp() before calling PCBJacobiGetSubKSP(). 453 454 Level: advanced 455 456 .keywords: block, Jacobi, get, sub, KSP, context 457 458 .seealso: PCBJacobiGetSubKSP() 459 @*/ 460 int PCBJacobiGetSubKSP(PC pc,int *n_local,int *first_local,KSP *ksp[]) 461 { 462 int ierr,(*f)(PC,int *,int *,KSP **); 463 464 PetscFunctionBegin; 465 PetscValidHeaderSpecific(pc,PC_COOKIE,1); 466 ierr = PetscObjectQueryFunction((PetscObject)pc,"PCBJacobiGetSubKSP_C",(void (**)(void))&f);CHKERRQ(ierr); 467 if (f) { 468 ierr = (*f)(pc,n_local,first_local,ksp);CHKERRQ(ierr); 469 } else { 470 SETERRQ(1,"Cannot get subsolvers for this preconditioner"); 471 } 472 PetscFunctionReturn(0); 473 } 474 475 #undef __FUNCT__ 476 #define __FUNCT__ "PCBJacobiSetTotalBlocks" 477 /*@ 478 PCBJacobiSetTotalBlocks - Sets the global number of blocks for the block 479 Jacobi preconditioner. 480 481 Collective on PC 482 483 Input Parameters: 484 + pc - the preconditioner context 485 . blocks - the number of blocks 486 - lens - [optional] integer array containing the size of each block 487 488 Options Database Key: 489 . -pc_bjacobi_blocks <blocks> - Sets the number of global blocks 490 491 Notes: 492 Currently only a limited number of blocking configurations are supported. 493 All processors sharing the PC must call this routine with the same data. 494 495 Level: intermediate 496 497 .keywords: set, number, Jacobi, global, total, blocks 498 499 .seealso: PCBJacobiSetUseTrueLocal(), PCBJacobiSetLocalBlocks() 500 @*/ 501 int PCBJacobiSetTotalBlocks(PC pc,int blocks,const int lens[]) 502 { 503 int ierr,(*f)(PC,int,const int[]); 504 505 PetscFunctionBegin; 506 PetscValidHeaderSpecific(pc,PC_COOKIE,1); 507 if (blocks <= 0) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must have positive blocks"); 508 ierr = PetscObjectQueryFunction((PetscObject)pc,"PCBJacobiSetTotalBlocks_C",(void (**)(void))&f);CHKERRQ(ierr); 509 if (f) { 510 ierr = (*f)(pc,blocks,lens);CHKERRQ(ierr); 511 } 512 PetscFunctionReturn(0); 513 } 514 515 #undef __FUNCT__ 516 #define __FUNCT__ "PCBJacobiGetTotalBlocks" 517 /*@C 518 PCBJacobiGetTotalBlocks - Gets the global number of blocks for the block 519 Jacobi preconditioner. 520 521 Collective on PC 522 523 Input Parameter: 524 . pc - the preconditioner context 525 526 Output parameters: 527 + blocks - the number of blocks 528 - lens - integer array containing the size of each block 529 530 Level: intermediate 531 532 .keywords: get, number, Jacobi, global, total, blocks 533 534 .seealso: PCBJacobiSetUseTrueLocal(), PCBJacobiGetLocalBlocks() 535 @*/ 536 int PCBJacobiGetTotalBlocks(PC pc, int *blocks, const int *lens[]) 537 { 538 int ierr,(*f)(PC,int*, const int *[]); 539 540 PetscFunctionBegin; 541 PetscValidHeaderSpecific(pc, PC_COOKIE,1); 542 PetscValidIntPointer(blocks,2); 543 ierr = PetscObjectQueryFunction((PetscObject)pc,"PCBJacobiGetTotalBlocks_C",(void (**)(void))&f);CHKERRQ(ierr); 544 if (f) { 545 ierr = (*f)(pc,blocks,lens);CHKERRQ(ierr); 546 } 547 PetscFunctionReturn(0); 548 } 549 550 #undef __FUNCT__ 551 #define __FUNCT__ "PCBJacobiSetLocalBlocks" 552 /*@ 553 PCBJacobiSetLocalBlocks - Sets the local number of blocks for the block 554 Jacobi preconditioner. 555 556 Not Collective 557 558 Input Parameters: 559 + pc - the preconditioner context 560 . blocks - the number of blocks 561 - lens - [optional] integer array containing size of each block 562 563 Note: 564 Currently only a limited number of blocking configurations are supported. 565 566 Level: intermediate 567 568 .keywords: PC, set, number, Jacobi, local, blocks 569 570 .seealso: PCBJacobiSetUseTrueLocal(), PCBJacobiSetTotalBlocks() 571 @*/ 572 int PCBJacobiSetLocalBlocks(PC pc,int blocks,const int lens[]) 573 { 574 int ierr,(*f)(PC,int,const int []); 575 576 PetscFunctionBegin; 577 PetscValidHeaderSpecific(pc,PC_COOKIE,1); 578 if (blocks < 0) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must have nonegative blocks"); 579 ierr = PetscObjectQueryFunction((PetscObject)pc,"PCBJacobiSetLocalBlocks_C",(void (**)(void))&f);CHKERRQ(ierr); 580 if (f) { 581 ierr = (*f)(pc,blocks,lens);CHKERRQ(ierr); 582 } 583 PetscFunctionReturn(0); 584 } 585 586 #undef __FUNCT__ 587 #define __FUNCT__ "PCBJacobiGetLocalBlocks" 588 /*@C 589 PCBJacobiGetLocalBlocks - Gets the local number of blocks for the block 590 Jacobi preconditioner. 591 592 Not Collective 593 594 Input Parameters: 595 + pc - the preconditioner context 596 . blocks - the number of blocks 597 - lens - [optional] integer array containing size of each block 598 599 Note: 600 Currently only a limited number of blocking configurations are supported. 601 602 Level: intermediate 603 604 .keywords: PC, get, number, Jacobi, local, blocks 605 606 .seealso: PCBJacobiSetUseTrueLocal(), PCBJacobiGetTotalBlocks() 607 @*/ 608 int PCBJacobiGetLocalBlocks(PC pc, int *blocks, const int *lens[]) 609 { 610 int ierr,(*f)(PC,int*, const int *[]); 611 612 PetscFunctionBegin; 613 PetscValidHeaderSpecific(pc, PC_COOKIE,1); 614 PetscValidIntPointer(blocks,2); 615 ierr = PetscObjectQueryFunction((PetscObject)pc,"PCBJacobiGetLocalBlocks_C",(void (**)(void))&f);CHKERRQ(ierr); 616 if (f) { 617 ierr = (*f)(pc,blocks,lens);CHKERRQ(ierr); 618 } 619 PetscFunctionReturn(0); 620 } 621 622 /* -----------------------------------------------------------------------------------*/ 623 624 /*MC 625 PCBJACOBI - Use block Jacobi preconditioning, each block is (approximately) solved with 626 its own KSP object. 627 628 Options Database Keys: 629 . -pc_bjacobi_truelocal - Activates PCBJacobiSetUseTrueLocal() 630 631 Notes: Each processor can have one or more blocks, but a block cannot be shared by more 632 than one processor. Defaults to one block per processor. 633 634 To set options on the solvers for each block append -sub_ to all the KSP, KSP, and PC 635 options database keys. For example, -sub_pc_type ilu -sub_pc_ilu_levels 1 -sub_ksp_type preonly 636 637 To set the options on the solvers seperate for each block call PCBJacobiGetSubKSP() 638 and set the options directly on the resulting KSP object (you can access its PC 639 KSPGetPC()) 640 641 Level: beginner 642 643 Concepts: block Jacobi 644 645 .seealso: PCCreate(), PCSetType(), PCType (for list of available types), PC, 646 PCASM, PCBJacobiSetUseTrueLocal(), PCBJacobiGetSubKSP(), PCBJacobiSetTotalBlocks(), 647 PCBJacobiSetLocalBlocks(), PCSetModifySubmatrices() 648 M*/ 649 650 EXTERN_C_BEGIN 651 #undef __FUNCT__ 652 #define __FUNCT__ "PCCreate_BJacobi" 653 int PCCreate_BJacobi(PC pc) 654 { 655 int rank,ierr; 656 PC_BJacobi *jac; 657 658 PetscFunctionBegin; 659 ierr = PetscNew(PC_BJacobi,&jac);CHKERRQ(ierr); 660 PetscLogObjectMemory(pc,sizeof(PC_BJacobi)); 661 ierr = MPI_Comm_rank(pc->comm,&rank);CHKERRQ(ierr); 662 pc->ops->apply = 0; 663 pc->ops->applytranspose = 0; 664 pc->ops->setup = PCSetUp_BJacobi; 665 pc->ops->destroy = PCDestroy_BJacobi; 666 pc->ops->setfromoptions = PCSetFromOptions_BJacobi; 667 pc->ops->view = PCView_BJacobi; 668 pc->ops->applyrichardson = 0; 669 670 pc->data = (void*)jac; 671 jac->n = -1; 672 jac->n_local = -1; 673 jac->first_local = rank; 674 jac->ksp = 0; 675 jac->use_true_local = PETSC_FALSE; 676 jac->same_local_solves = PETSC_TRUE; 677 jac->g_lens = 0; 678 jac->l_lens = 0; 679 jac->tp_mat = 0; 680 jac->tp_pmat = 0; 681 682 ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCBJacobiSetUseTrueLocal_C", 683 "PCBJacobiSetUseTrueLocal_BJacobi", 684 PCBJacobiSetUseTrueLocal_BJacobi);CHKERRQ(ierr); 685 ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCBJacobiGetSubKSP_C","PCBJacobiGetSubKSP_BJacobi", 686 PCBJacobiGetSubKSP_BJacobi);CHKERRQ(ierr); 687 ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCBJacobiSetTotalBlocks_C","PCBJacobiSetTotalBlocks_BJacobi", 688 PCBJacobiSetTotalBlocks_BJacobi);CHKERRQ(ierr); 689 ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCBJacobiGetTotalBlocks_C","PCBJacobiGetTotalBlocks_BJacobi", 690 PCBJacobiGetTotalBlocks_BJacobi);CHKERRQ(ierr); 691 ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCBJacobiSetLocalBlocks_C","PCBJacobiSetLocalBlocks_BJacobi", 692 PCBJacobiSetLocalBlocks_BJacobi);CHKERRQ(ierr); 693 ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCBJacobiGetLocalBlocks_C","PCBJacobiGetLocalBlocks_BJacobi", 694 PCBJacobiGetLocalBlocks_BJacobi);CHKERRQ(ierr); 695 696 PetscFunctionReturn(0); 697 } 698 EXTERN_C_END 699 700 /* --------------------------------------------------------------------------------------------*/ 701 /* 702 These are for a single block per processor; works for AIJ, BAIJ; Seq and MPI 703 */ 704 #undef __FUNCT__ 705 #define __FUNCT__ "PCDestroy_BJacobi_Singleblock" 706 int PCDestroy_BJacobi_Singleblock(PC pc) 707 { 708 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 709 PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data; 710 int ierr; 711 712 PetscFunctionBegin; 713 /* 714 If the on processor block had to be generated via a MatGetDiagonalBlock() 715 that creates a copy (for example MPIBDiag matrices do), this frees the space 716 */ 717 if (jac->tp_mat) { 718 ierr = MatDestroy(jac->tp_mat);CHKERRQ(ierr); 719 } 720 if (jac->tp_pmat) { 721 ierr = MatDestroy(jac->tp_pmat);CHKERRQ(ierr); 722 } 723 724 ierr = KSPDestroy(jac->ksp[0]);CHKERRQ(ierr); 725 ierr = PetscFree(jac->ksp);CHKERRQ(ierr); 726 ierr = VecDestroy(bjac->x);CHKERRQ(ierr); 727 ierr = VecDestroy(bjac->y);CHKERRQ(ierr); 728 if (jac->l_lens) {ierr = PetscFree(jac->l_lens);CHKERRQ(ierr);} 729 if (jac->g_lens) {ierr = PetscFree(jac->g_lens);CHKERRQ(ierr);} 730 ierr = PetscFree(bjac);CHKERRQ(ierr); 731 ierr = PetscFree(jac);CHKERRQ(ierr); 732 PetscFunctionReturn(0); 733 } 734 735 #undef __FUNCT__ 736 #define __FUNCT__ "PCSetUpOnBlocks_BJacobi_Singleblock" 737 int PCSetUpOnBlocks_BJacobi_Singleblock(PC pc) 738 { 739 int ierr; 740 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 741 PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data; 742 743 PetscFunctionBegin; 744 ierr = KSPSetRhs(jac->ksp[0],bjac->x);CHKERRQ(ierr); 745 ierr = KSPSetSolution(jac->ksp[0],bjac->y);CHKERRQ(ierr); 746 ierr = KSPSetUp(jac->ksp[0]);CHKERRQ(ierr); 747 PetscFunctionReturn(0); 748 } 749 750 #undef __FUNCT__ 751 #define __FUNCT__ "PCApply_BJacobi_Singleblock" 752 int PCApply_BJacobi_Singleblock(PC pc,Vec x,Vec y) 753 { 754 int ierr; 755 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 756 PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data; 757 PetscScalar *x_array,*y_array; 758 759 PetscFunctionBegin; 760 /* 761 The VecPlaceArray() is to avoid having to copy the 762 y vector into the bjac->x vector. The reason for 763 the bjac->x vector is that we need a sequential vector 764 for the sequential solve. 765 */ 766 ierr = VecGetArray(x,&x_array);CHKERRQ(ierr); 767 ierr = VecGetArray(y,&y_array);CHKERRQ(ierr); 768 ierr = VecPlaceArray(bjac->x,x_array);CHKERRQ(ierr); 769 ierr = VecPlaceArray(bjac->y,y_array);CHKERRQ(ierr); 770 ierr = KSPSolve(jac->ksp[0]);CHKERRQ(ierr); 771 ierr = VecRestoreArray(x,&x_array);CHKERRQ(ierr); 772 ierr = VecRestoreArray(y,&y_array);CHKERRQ(ierr); 773 PetscFunctionReturn(0); 774 } 775 776 #undef __FUNCT__ 777 #define __FUNCT__ "PCApplySymmetricLeft_BJacobi_Singleblock" 778 int PCApplySymmetricLeft_BJacobi_Singleblock(PC pc,Vec x,Vec y) 779 { 780 int ierr; 781 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 782 PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data; 783 PetscScalar *x_array,*y_array; 784 PC subpc; 785 786 PetscFunctionBegin; 787 /* 788 The VecPlaceArray() is to avoid having to copy the 789 y vector into the bjac->x vector. The reason for 790 the bjac->x vector is that we need a sequential vector 791 for the sequential solve. 792 */ 793 ierr = VecGetArray(x,&x_array);CHKERRQ(ierr); 794 ierr = VecGetArray(y,&y_array);CHKERRQ(ierr); 795 ierr = VecPlaceArray(bjac->x,x_array);CHKERRQ(ierr); 796 ierr = VecPlaceArray(bjac->y,y_array);CHKERRQ(ierr); 797 798 /* apply the symmetric left portion of the inner PC operator */ 799 /* note this by-passes the inner KSP and its options completely */ 800 801 ierr = KSPGetPC(jac->ksp[0],&subpc);CHKERRQ(ierr); 802 ierr = PCApplySymmetricLeft(subpc,bjac->x,bjac->y);CHKERRQ(ierr); 803 804 ierr = VecRestoreArray(x,&x_array);CHKERRQ(ierr); 805 ierr = VecRestoreArray(y,&y_array);CHKERRQ(ierr); 806 PetscFunctionReturn(0); 807 } 808 809 #undef __FUNCT__ 810 #define __FUNCT__ "PCApplySymmetricRight_BJacobi_Singleblock" 811 int PCApplySymmetricRight_BJacobi_Singleblock(PC pc,Vec x,Vec y) 812 { 813 int ierr; 814 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 815 PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data; 816 PetscScalar *x_array,*y_array; 817 PC subpc; 818 819 PetscFunctionBegin; 820 /* 821 The VecPlaceArray() is to avoid having to copy the 822 y vector into the bjac->x vector. The reason for 823 the bjac->x vector is that we need a sequential vector 824 for the sequential solve. 825 */ 826 ierr = VecGetArray(x,&x_array);CHKERRQ(ierr); 827 ierr = VecGetArray(y,&y_array);CHKERRQ(ierr); 828 ierr = VecPlaceArray(bjac->x,x_array);CHKERRQ(ierr); 829 ierr = VecPlaceArray(bjac->y,y_array);CHKERRQ(ierr); 830 831 /* apply the symmetric right portion of the inner PC operator */ 832 /* note this by-passes the inner KSP and its options completely */ 833 834 ierr = KSPGetPC(jac->ksp[0],&subpc);CHKERRQ(ierr); 835 ierr = PCApplySymmetricRight(subpc,bjac->x,bjac->y);CHKERRQ(ierr); 836 837 ierr = VecRestoreArray(x,&x_array);CHKERRQ(ierr); 838 ierr = VecRestoreArray(y,&y_array);CHKERRQ(ierr); 839 PetscFunctionReturn(0); 840 } 841 842 #undef __FUNCT__ 843 #define __FUNCT__ "PCApplyTranspose_BJacobi_Singleblock" 844 int PCApplyTranspose_BJacobi_Singleblock(PC pc,Vec x,Vec y) 845 { 846 int ierr; 847 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 848 PC_BJacobi_Singleblock *bjac = (PC_BJacobi_Singleblock*)jac->data; 849 PetscScalar *x_array,*y_array; 850 851 PetscFunctionBegin; 852 /* 853 The VecPlaceArray() is to avoid having to copy the 854 y vector into the bjac->x vector. The reason for 855 the bjac->x vector is that we need a sequential vector 856 for the sequential solve. 857 */ 858 ierr = VecGetArray(x,&x_array);CHKERRQ(ierr); 859 ierr = VecGetArray(y,&y_array);CHKERRQ(ierr); 860 ierr = VecPlaceArray(bjac->x,x_array);CHKERRQ(ierr); 861 ierr = VecPlaceArray(bjac->y,y_array);CHKERRQ(ierr); 862 ierr = KSPSolveTranspose(jac->ksp[0]);CHKERRQ(ierr); 863 ierr = VecRestoreArray(x,&x_array);CHKERRQ(ierr); 864 ierr = VecRestoreArray(y,&y_array);CHKERRQ(ierr); 865 PetscFunctionReturn(0); 866 } 867 868 #undef __FUNCT__ 869 #define __FUNCT__ "PCSetUp_BJacobi_Singleblock" 870 static int PCSetUp_BJacobi_Singleblock(PC pc,Mat mat,Mat pmat) 871 { 872 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 873 int ierr,m; 874 KSP ksp; 875 Vec x,y; 876 PC_BJacobi_Singleblock *bjac; 877 PC subpc; 878 879 PetscFunctionBegin; 880 881 /* set default direct solver with no Krylov method */ 882 if (!pc->setupcalled) { 883 char *prefix; 884 ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr); 885 PetscLogObjectParent(pc,ksp); 886 ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr); 887 ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr); 888 ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr); 889 ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr); 890 ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr); 891 /* 892 The reason we need to generate these vectors is to serve 893 as the right-hand side and solution vector for the solve on the 894 block. We do not need to allocate space for the vectors since 895 that is provided via VecPlaceArray() just before the call to 896 KSPSolve() on the block. 897 */ 898 ierr = MatGetSize(pmat,&m,&m);CHKERRQ(ierr); 899 ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,m,PETSC_NULL,&x);CHKERRQ(ierr); 900 ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,m,PETSC_NULL,&y);CHKERRQ(ierr); 901 PetscLogObjectParent(pc,x); 902 PetscLogObjectParent(pc,y); 903 904 pc->ops->destroy = PCDestroy_BJacobi_Singleblock; 905 pc->ops->apply = PCApply_BJacobi_Singleblock; 906 pc->ops->applysymmetricleft = PCApplySymmetricLeft_BJacobi_Singleblock; 907 pc->ops->applysymmetricright = PCApplySymmetricRight_BJacobi_Singleblock; 908 pc->ops->applytranspose = PCApplyTranspose_BJacobi_Singleblock; 909 pc->ops->setuponblocks = PCSetUpOnBlocks_BJacobi_Singleblock; 910 911 ierr = PetscMalloc(sizeof(PC_BJacobi_Singleblock),&bjac);CHKERRQ(ierr); 912 PetscLogObjectMemory(pc,sizeof(PC_BJacobi_Singleblock)); 913 bjac->x = x; 914 bjac->y = y; 915 916 ierr = PetscMalloc(sizeof(KSP),&jac->ksp);CHKERRQ(ierr); 917 jac->ksp[0] = ksp; 918 jac->data = (void*)bjac; 919 } else { 920 ksp = jac->ksp[0]; 921 bjac = (PC_BJacobi_Singleblock *)jac->data; 922 } 923 if (jac->use_true_local) { 924 ierr = KSPSetOperators(ksp,mat,pmat,pc->flag);CHKERRQ(ierr); 925 } else { 926 ierr = KSPSetOperators(ksp,pmat,pmat,pc->flag);CHKERRQ(ierr); 927 } 928 ierr = KSPSetFromOptions(ksp);CHKERRQ(ierr); 929 PetscFunctionReturn(0); 930 } 931 932 /* ---------------------------------------------------------------------------------------------*/ 933 934 #undef __FUNCT__ 935 #define __FUNCT__ "PCDestroy_BJacobi_Multiblock" 936 int PCDestroy_BJacobi_Multiblock(PC pc) 937 { 938 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 939 PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data; 940 int i,ierr; 941 942 PetscFunctionBegin; 943 ierr = MatDestroyMatrices(jac->n_local,&bjac->pmat);CHKERRQ(ierr); 944 if (jac->use_true_local) { 945 ierr = MatDestroyMatrices(jac->n_local,&bjac->mat);CHKERRQ(ierr); 946 } 947 948 /* 949 If the on processor block had to be generated via a MatGetDiagonalBlock() 950 that creates a copy (for example MPIBDiag matrices do), this frees the space 951 */ 952 if (jac->tp_mat) { 953 ierr = MatDestroy(jac->tp_mat);CHKERRQ(ierr); 954 } 955 if (jac->tp_pmat) { 956 ierr = MatDestroy(jac->tp_pmat);CHKERRQ(ierr); 957 } 958 959 for (i=0; i<jac->n_local; i++) { 960 ierr = KSPDestroy(jac->ksp[i]);CHKERRQ(ierr); 961 ierr = VecDestroy(bjac->x[i]);CHKERRQ(ierr); 962 ierr = VecDestroy(bjac->y[i]);CHKERRQ(ierr); 963 ierr = ISDestroy(bjac->is[i]);CHKERRQ(ierr); 964 } 965 ierr = PetscFree(jac->ksp);CHKERRQ(ierr); 966 ierr = PetscFree(bjac->x);CHKERRQ(ierr); 967 ierr = PetscFree(bjac->starts);CHKERRQ(ierr); 968 ierr = PetscFree(bjac->is);CHKERRQ(ierr); 969 ierr = PetscFree(bjac);CHKERRQ(ierr); 970 if (jac->l_lens) {ierr = PetscFree(jac->l_lens);CHKERRQ(ierr);} 971 if (jac->g_lens) {ierr = PetscFree(jac->g_lens);CHKERRQ(ierr);} 972 ierr = PetscFree(jac);CHKERRQ(ierr); 973 PetscFunctionReturn(0); 974 } 975 976 #undef __FUNCT__ 977 #define __FUNCT__ "PCSetUpOnBlocks_BJacobi_Multiblock" 978 int PCSetUpOnBlocks_BJacobi_Multiblock(PC pc) 979 { 980 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 981 int ierr,i,n_local = jac->n_local; 982 PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data; 983 984 PetscFunctionBegin; 985 for (i=0; i<n_local; i++) { 986 ierr = KSPSetRhs(jac->ksp[i],bjac->x[i]);CHKERRQ(ierr); 987 ierr = KSPSetSolution(jac->ksp[i],bjac->y[i]);CHKERRQ(ierr); 988 ierr = KSPSetUp(jac->ksp[i]);CHKERRQ(ierr); 989 } 990 PetscFunctionReturn(0); 991 } 992 993 /* 994 Preconditioner for block Jacobi 995 */ 996 #undef __FUNCT__ 997 #define __FUNCT__ "PCApply_BJacobi_Multiblock" 998 int PCApply_BJacobi_Multiblock(PC pc,Vec x,Vec y) 999 { 1000 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 1001 int ierr,i,n_local = jac->n_local; 1002 PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data; 1003 PetscScalar *xin,*yin; 1004 static PetscTruth flag = PETSC_TRUE; 1005 static int SUBKspSolve; 1006 1007 PetscFunctionBegin; 1008 if (flag) { 1009 ierr = PetscLogEventRegister(&SUBKspSolve,"SubKspSolve",KSP_COOKIE);CHKERRQ(ierr); 1010 flag = PETSC_FALSE; 1011 } 1012 ierr = VecGetArray(x,&xin);CHKERRQ(ierr); 1013 ierr = VecGetArray(y,&yin);CHKERRQ(ierr); 1014 for (i=0; i<n_local; i++) { 1015 /* 1016 To avoid copying the subvector from x into a workspace we instead 1017 make the workspace vector array point to the subpart of the array of 1018 the global vector. 1019 */ 1020 ierr = VecPlaceArray(bjac->x[i],xin+bjac->starts[i]);CHKERRQ(ierr); 1021 ierr = VecPlaceArray(bjac->y[i],yin+bjac->starts[i]);CHKERRQ(ierr); 1022 1023 ierr = PetscLogEventBegin(SUBKspSolve,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr); 1024 ierr = KSPSolve(jac->ksp[i]);CHKERRQ(ierr); 1025 ierr = PetscLogEventEnd(SUBKspSolve,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr); 1026 } 1027 ierr = VecRestoreArray(x,&xin);CHKERRQ(ierr); 1028 ierr = VecRestoreArray(y,&yin);CHKERRQ(ierr); 1029 PetscFunctionReturn(0); 1030 } 1031 1032 /* 1033 Preconditioner for block Jacobi 1034 */ 1035 #undef __FUNCT__ 1036 #define __FUNCT__ "PCApplyTranspose_BJacobi_Multiblock" 1037 int PCApplyTranspose_BJacobi_Multiblock(PC pc,Vec x,Vec y) 1038 { 1039 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 1040 int ierr,i,n_local = jac->n_local; 1041 PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data; 1042 PetscScalar *xin,*yin; 1043 static PetscTruth flag = PETSC_TRUE; 1044 static int SUBKspSolve; 1045 1046 PetscFunctionBegin; 1047 if (flag) { 1048 ierr = PetscLogEventRegister(&SUBKspSolve,"SubKspSolveTranspose",KSP_COOKIE);CHKERRQ(ierr); 1049 flag = PETSC_FALSE; 1050 } 1051 ierr = VecGetArray(x,&xin);CHKERRQ(ierr); 1052 ierr = VecGetArray(y,&yin);CHKERRQ(ierr); 1053 for (i=0; i<n_local; i++) { 1054 /* 1055 To avoid copying the subvector from x into a workspace we instead 1056 make the workspace vector array point to the subpart of the array of 1057 the global vector. 1058 */ 1059 ierr = VecPlaceArray(bjac->x[i],xin+bjac->starts[i]);CHKERRQ(ierr); 1060 ierr = VecPlaceArray(bjac->y[i],yin+bjac->starts[i]);CHKERRQ(ierr); 1061 1062 ierr = PetscLogEventBegin(SUBKspSolve,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr); 1063 ierr = KSPSolveTranspose(jac->ksp[i]);CHKERRQ(ierr); 1064 ierr = PetscLogEventEnd(SUBKspSolve,jac->ksp[i],bjac->x[i],bjac->y[i],0);CHKERRQ(ierr); 1065 } 1066 ierr = VecRestoreArray(x,&xin);CHKERRQ(ierr); 1067 ierr = VecRestoreArray(y,&yin);CHKERRQ(ierr); 1068 PetscFunctionReturn(0); 1069 } 1070 1071 #undef __FUNCT__ 1072 #define __FUNCT__ "PCSetUp_BJacobi_Multiblock" 1073 static int PCSetUp_BJacobi_Multiblock(PC pc,Mat mat,Mat pmat) 1074 { 1075 PC_BJacobi *jac = (PC_BJacobi*)pc->data; 1076 int ierr,m,n_local,N,M,start,i; 1077 char *prefix,*pprefix,*mprefix; 1078 KSP ksp; 1079 Vec x,y; 1080 PC_BJacobi_Multiblock *bjac = (PC_BJacobi_Multiblock*)jac->data; 1081 PC subpc; 1082 IS is; 1083 MatReuse scall = MAT_REUSE_MATRIX; 1084 1085 PetscFunctionBegin; 1086 ierr = MatGetLocalSize(pc->pmat,&M,&N);CHKERRQ(ierr); 1087 1088 n_local = jac->n_local; 1089 1090 if (jac->use_true_local) { 1091 if (mat->type != pmat->type) SETERRQ(PETSC_ERR_ARG_INCOMP,"Matrices not of same type"); 1092 } 1093 1094 /* set default direct solver with no Krylov method */ 1095 if (!pc->setupcalled) { 1096 scall = MAT_INITIAL_MATRIX; 1097 pc->ops->destroy = PCDestroy_BJacobi_Multiblock; 1098 pc->ops->apply = PCApply_BJacobi_Multiblock; 1099 pc->ops->applytranspose= PCApplyTranspose_BJacobi_Multiblock; 1100 pc->ops->setuponblocks = PCSetUpOnBlocks_BJacobi_Multiblock; 1101 1102 ierr = PetscMalloc(sizeof(PC_BJacobi_Multiblock),&bjac);CHKERRQ(ierr); 1103 PetscLogObjectMemory(pc,sizeof(PC_BJacobi_Multiblock)); 1104 ierr = PetscMalloc(n_local*sizeof(KSP),&jac->ksp);CHKERRQ(ierr); 1105 PetscLogObjectMemory(pc,sizeof(n_local*sizeof(KSP))); 1106 ierr = PetscMalloc(2*n_local*sizeof(Vec),&bjac->x);CHKERRQ(ierr); 1107 PetscLogObjectMemory(pc,sizeof(2*n_local*sizeof(Vec))); 1108 bjac->y = bjac->x + n_local; 1109 ierr = PetscMalloc(n_local*sizeof(PetscScalar),&bjac->starts);CHKERRQ(ierr); 1110 PetscLogObjectMemory(pc,sizeof(n_local*sizeof(PetscScalar))); 1111 1112 jac->data = (void*)bjac; 1113 ierr = PetscMalloc(n_local*sizeof(IS),&bjac->is);CHKERRQ(ierr); 1114 PetscLogObjectMemory(pc,sizeof(n_local*sizeof(IS))); 1115 1116 start = 0; 1117 for (i=0; i<n_local; i++) { 1118 ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr); 1119 PetscLogObjectParent(pc,ksp); 1120 ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr); 1121 ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr); 1122 ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr); 1123 ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr); 1124 ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr); 1125 1126 m = jac->l_lens[i]; 1127 1128 /* 1129 The reason we need to generate these vectors is to serve 1130 as the right-hand side and solution vector for the solve on the 1131 block. We do not need to allocate space for the vectors since 1132 that is provided via VecPlaceArray() just before the call to 1133 KSPSolve() on the block. 1134 1135 */ 1136 ierr = VecCreateSeq(PETSC_COMM_SELF,m,&x);CHKERRQ(ierr); 1137 ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,m,PETSC_NULL,&y);CHKERRQ(ierr); 1138 PetscLogObjectParent(pc,x); 1139 PetscLogObjectParent(pc,y); 1140 bjac->x[i] = x; 1141 bjac->y[i] = y; 1142 bjac->starts[i] = start; 1143 jac->ksp[i] = ksp; 1144 1145 ierr = ISCreateStride(PETSC_COMM_SELF,m,start,1,&is);CHKERRQ(ierr); 1146 bjac->is[i] = is; 1147 PetscLogObjectParent(pc,is); 1148 1149 start += m; 1150 } 1151 } else { 1152 bjac = (PC_BJacobi_Multiblock*)jac->data; 1153 /* 1154 Destroy the blocks from the previous iteration 1155 */ 1156 if (pc->flag == DIFFERENT_NONZERO_PATTERN) { 1157 ierr = MatDestroyMatrices(n_local,&bjac->pmat);CHKERRQ(ierr); 1158 if (jac->use_true_local) { 1159 ierr = MatDestroyMatrices(n_local,&bjac->mat);CHKERRQ(ierr); 1160 } 1161 scall = MAT_INITIAL_MATRIX; 1162 } 1163 } 1164 1165 ierr = MatGetSubMatrices(pmat,n_local,bjac->is,bjac->is,scall,&bjac->pmat);CHKERRQ(ierr); 1166 if (jac->use_true_local) { 1167 ierr = PetscObjectGetOptionsPrefix((PetscObject)mat,&mprefix);CHKERRQ(ierr); 1168 ierr = MatGetSubMatrices(mat,n_local,bjac->is,bjac->is,scall,&bjac->mat);CHKERRQ(ierr); 1169 } 1170 /* Return control to the user so that the submatrices can be modified (e.g., to apply 1171 different boundary conditions for the submatrices than for the global problem) */ 1172 ierr = PCModifySubMatrices(pc,n_local,bjac->is,bjac->is,bjac->pmat,pc->modifysubmatricesP);CHKERRQ(ierr); 1173 1174 ierr = PetscObjectGetOptionsPrefix((PetscObject)pmat,&pprefix);CHKERRQ(ierr); 1175 for (i=0; i<n_local; i++) { 1176 PetscLogObjectParent(pc,bjac->pmat[i]); 1177 ierr = PetscObjectSetOptionsPrefix((PetscObject)bjac->pmat[i],pprefix);CHKERRQ(ierr); 1178 if (jac->use_true_local) { 1179 PetscLogObjectParent(pc,bjac->mat[i]); 1180 ierr = PetscObjectSetOptionsPrefix((PetscObject)bjac->mat[i],mprefix);CHKERRQ(ierr); 1181 ierr = KSPSetOperators(jac->ksp[i],bjac->mat[i],bjac->pmat[i],pc->flag);CHKERRQ(ierr); 1182 } else { 1183 ierr = KSPSetOperators(jac->ksp[i],bjac->pmat[i],bjac->pmat[i],pc->flag);CHKERRQ(ierr); 1184 } 1185 ierr = KSPSetFromOptions(jac->ksp[i]);CHKERRQ(ierr); 1186 } 1187 1188 PetscFunctionReturn(0); 1189 } 1190 1191 1192 1193 1194 1195 1196 1197 1198 1199 1200