#define PETSCVEC_DLL
/*
   This file contains routines for parallel vector operations.
 */
#include "src/vec/vec/impls/mpi/pvecimpl.h"   /*I  "petscvec.h"   I*/

/*
   Note this code is very similar to VecPublish_Seq()
*/
#undef __FUNCT__
#define __FUNCT__ "VecPublish_MPI"
static PetscErrorCode VecPublish_MPI(PetscObject obj)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecDot_MPI"
PetscErrorCode VecDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecDot_Seq(xin,yin,&work);CHKERRQ(ierr);
  ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,PetscSum_Op,xin->comm);CHKERRQ(ierr);
  *z   = sum;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecTDot_MPI"
PetscErrorCode VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecTDot_Seq(xin,yin,&work);CHKERRQ(ierr);
  ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,PetscSum_Op,xin->comm);CHKERRQ(ierr);
  *z   = sum;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecSetOption_MPI"
PetscErrorCode VecSetOption_MPI(Vec v,VecOption op)
{
  PetscFunctionBegin;
  if (op == VEC_IGNORE_OFF_PROC_ENTRIES) {
    v->stash.donotstash = PETSC_TRUE;
  } else if (op == VEC_TREAT_OFF_PROC_ENTRIES) {
    v->stash.donotstash = PETSC_FALSE;
  }
  PetscFunctionReturn(0);
}

EXTERN PetscErrorCode VecDuplicate_MPI(Vec,Vec *);
EXTERN_C_BEGIN
EXTERN PetscErrorCode VecView_MPI_Draw(Vec,PetscViewer);
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "VecPlaceArray_MPI"
PetscErrorCode VecPlaceArray_MPI(Vec vin,const PetscScalar *a)
{
  PetscErrorCode ierr;
  Vec_MPI        *v = (Vec_MPI*)vin->data;

  PetscFunctionBegin;
  if (v->unplacedarray) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
  v->unplacedarray = v->array;  /* save the previous array so VecResetArray() can bring it back */
  v->array         = (PetscScalar*)a;
  if (v->localrep) {
    ierr = VecPlaceArray(v->localrep,a);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

EXTERN PetscErrorCode VecLoad_Binary(PetscViewer,VecType,Vec*);
EXTERN PetscErrorCode VecGetValues_MPI(Vec,PetscInt,const PetscInt[],PetscScalar[]);

static struct _VecOps DvOps = { VecDuplicate_MPI,
            VecDuplicateVecs_Default,
            VecDestroyVecs_Default,
            VecDot_MPI,
            VecMDot_MPI,
            VecNorm_MPI,
            VecTDot_MPI,
            VecMTDot_MPI,
            VecScale_Seq,
            VecCopy_Seq,
            VecSet_Seq,
            VecSwap_Seq,
            VecAXPY_Seq,
            VecAXPBY_Seq,
            VecMAXPY_Seq,
            VecAYPX_Seq,
            VecWAXPY_Seq,
            VecPointwiseMult_Seq,
            VecPointwiseDivide_Seq,
            VecSetValues_MPI,
            VecAssemblyBegin_MPI,
            VecAssemblyEnd_MPI,
            VecGetArray_Seq,
            VecGetSize_MPI,
            VecGetSize_Seq,
            VecRestoreArray_Seq,
            VecMax_MPI,
            VecMin_MPI,
            VecSetRandom_Seq,
            VecSetOption_MPI,
            VecSetValuesBlocked_MPI,
            VecDestroy_MPI,
            VecView_MPI,
            VecPlaceArray_MPI,
            VecReplaceArray_Seq,
            VecDot_Seq,
            VecTDot_Seq,
            VecNorm_Seq,
            VecMDot_Seq,
            VecMTDot_Seq,
            VecLoadIntoVector_Default,
            VecReciprocal_Default,
            0, /* VecViewNative... */
            VecConjugate_Seq,
            0,
            0,
            VecResetArray_Seq,
            0,
            VecMaxPointwiseDivide_Seq,
            VecLoad_Binary,
            VecPointwiseMax_Seq,
            VecPointwiseMaxAbs_Seq,
            VecPointwiseMin_Seq,
            VecGetValues_MPI};
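/*
   The DvOps table above is copied into v->ops by VecCreate_MPI_Private() below, so
   the type-independent Vec interface reaches these MPI kernels through the function
   table. A minimal sketch of the dispatch (illustrative only, not the exact code of
   the generic wrapper):

       PetscErrorCode VecDot(Vec x,Vec y,PetscScalar *z)
       {
         PetscErrorCode ierr;
         ...
         ierr = (*x->ops->dot)(x,y,z);CHKERRQ(ierr);   (for a VECMPI vector this is VecDot_MPI())
         ...
       }
*/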
#undef __FUNCT__
#define __FUNCT__ "VecCreate_MPI_Private"
/*
    VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
    VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
    VecCreateGhostWithArray(), VecDuplicate_MPI(), and VecDuplicate_Shared()
*/
PetscErrorCode VecCreate_MPI_Private(Vec v,PetscInt nghost,const PetscScalar array[])
{
  Vec_MPI        *s;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  v->bops->publish = VecPublish_MPI;
  ierr = PetscLogObjectMemory(v,sizeof(Vec_MPI) + (v->n+nghost+1)*sizeof(PetscScalar));CHKERRQ(ierr);
  ierr = PetscNew(Vec_MPI,&s);CHKERRQ(ierr);
  ierr = PetscMemcpy(v->ops,&DvOps,sizeof(DvOps));CHKERRQ(ierr);
  v->data        = (void*)s;
  s->nghost      = nghost;
  v->mapping     = 0;
  v->bmapping    = 0;
  v->petscnative = PETSC_TRUE;

  if (array) {
    s->array           = (PetscScalar*)array;
    s->array_allocated = 0;
  } else {
    PetscInt n = v->n+nghost;
    ierr = PetscMalloc(n*sizeof(PetscScalar),&s->array);CHKERRQ(ierr);
    s->array_allocated = s->array;
    ierr = PetscMemzero(s->array,v->n*sizeof(PetscScalar));CHKERRQ(ierr);
  }
  ierr = PetscMapInitialize(v->comm,v->n,v->N,&v->map);CHKERRQ(ierr);

  /* By default parallel vectors do not have a local representation */
  s->localrep    = 0;
  s->localupdate = 0;

  v->stash.insertmode = NOT_SET_VALUES;
  /* Create the stashes. The block size for the bstash is set later, when
     VecSetValuesBlocked() is called. */
  ierr = VecStashCreate_Private(v->comm,1,&v->stash);CHKERRQ(ierr);
  ierr = VecStashCreate_Private(v->comm,v->bs,&v->bstash);CHKERRQ(ierr);

#if defined(PETSC_HAVE_MATLAB)
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEnginePut_C","VecMatlabEnginePut_Default",VecMatlabEnginePut_Default);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEngineGet_C","VecMatlabEngineGet_Default",VecMatlabEngineGet_Default);CHKERRQ(ierr);
#endif
  ierr = PetscObjectChangeTypeName((PetscObject)v,VECMPI);CHKERRQ(ierr);
  ierr = PetscPublishAll(v);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*MC
   VECMPI - VECMPI = "mpi" - The basic parallel vector

   Options Database Keys:
. -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions()

  Level: beginner

.seealso: VecCreate(), VecSetType(), VecSetFromOptions(), VecCreateMPIWithArray(), VECMPI, VecType, VecCreateMPI()
M*/

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "VecCreate_MPI"
PetscErrorCode PETSCVEC_DLLEXPORT VecCreate_MPI(Vec vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (vv->bs > 0) {
    ierr = PetscSplitOwnershipBlock(vv->comm,vv->bs,&vv->n,&vv->N);CHKERRQ(ierr);
  } else {
    ierr = PetscSplitOwnership(vv->comm,&vv->n,&vv->N);CHKERRQ(ierr);
  }
  ierr = VecCreate_MPI_Private(vv,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END
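/*
   Typical user-level creation of a VECMPI vector (a minimal sketch; the local
   size of 5 is arbitrary):

       Vec            x;
       PetscErrorCode ierr;

       ierr = VecCreate(PETSC_COMM_WORLD,&x);CHKERRQ(ierr);
       ierr = VecSetSizes(x,5,PETSC_DECIDE);CHKERRQ(ierr);
       ierr = VecSetType(x,VECMPI);CHKERRQ(ierr);   (or VecSetFromOptions(x))
       ...
       ierr = VecDestroy(x);CHKERRQ(ierr);
*/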
#undef __FUNCT__
#define __FUNCT__ "VecCreateMPIWithArray"
/*@C
   VecCreateMPIWithArray - Creates a parallel, array-style vector,
   where the user provides the array space to store the vector values.

   Collective on MPI_Comm

   Input Parameters:
+  comm  - the MPI communicator to use
.  n     - local vector length, cannot be PETSC_DECIDE
.  N     - global vector length (or PETSC_DECIDE to have it calculated)
-  array - the user provided array to store the vector values

   Output Parameter:
.  vv - the vector

   Notes:
   Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
   same type as an existing vector.

   If the user-provided array is PETSC_NULL, then VecPlaceArray() can be used
   at a later stage to SET the array for storing the vector values.

   PETSc does NOT free the array when the vector is destroyed via VecDestroy().
   The user should not free the array until the vector is destroyed.

   Level: intermediate

   Concepts: vectors^creating with array

.seealso: VecCreateSeqWithArray(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(),
          VecCreateMPI(), VecCreateGhostWithArray(), VecPlaceArray()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecCreateMPIWithArray(MPI_Comm comm,PetscInt n,PetscInt N,const PetscScalar array[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (n == PETSC_DECIDE) {
    SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
  }
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,0,array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
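/*
   Example use of VecCreateMPIWithArray() (a sketch; the array name "work" and
   the local size of 5 are hypothetical). Note that PETSc never frees the user
   array:

       PetscScalar    work[5];
       Vec            x;
       PetscErrorCode ierr;

       ierr = VecCreateMPIWithArray(PETSC_COMM_WORLD,5,PETSC_DECIDE,work,&x);CHKERRQ(ierr);
       ...
       ierr = VecDestroy(x);CHKERRQ(ierr);   (work remains owned by the caller)
*/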
#undef __FUNCT__
#define __FUNCT__ "VecGhostGetLocalForm"
/*@
    VecGhostGetLocalForm - Obtains the local ghosted representation of
    a parallel vector created with VecCreateGhost().

    Not Collective

    Input Parameter:
.   g - the global vector; it must have been obtained with VecCreateGhost(),
        VecCreateGhostWithArray(), or VecCreateSeq()

    Output Parameter:
.   l - the local (ghosted) representation

    Notes:
    This routine does not actually update the ghost values, but rather it
    returns a sequential vector that includes the locations for the ghost
    values and their current values. The returned vector and the original
    vector passed in share the same array that contains the actual vector data.

    One should call VecGhostRestoreLocalForm() or VecDestroy() once one is
    finished using the object.

    Level: advanced

   Concepts: vectors^ghost point access

.seealso: VecCreateGhost(), VecGhostRestoreLocalForm(), VecCreateGhostWithArray()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecGhostGetLocalForm(Vec g,Vec *l)
{
  PetscErrorCode ierr;
  PetscTruth     isseq,ismpi;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(g,VEC_COOKIE,1);
  PetscValidPointer(l,2);

  ierr = PetscTypeCompare((PetscObject)g,VECSEQ,&isseq);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)g,VECMPI,&ismpi);CHKERRQ(ierr);
  if (ismpi) {
    Vec_MPI *v = (Vec_MPI*)g->data;
    if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
    *l = v->localrep;
  } else if (isseq) {
    *l = g;
  } else {
    SETERRQ1(PETSC_ERR_ARG_WRONG,"Vector type %s does not have local representation",g->type_name);
  }
  ierr = PetscObjectReference((PetscObject)*l);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecGhostRestoreLocalForm"
/*@
    VecGhostRestoreLocalForm - Restores the local ghosted representation of
    a parallel vector obtained with VecGhostGetLocalForm().

    Not Collective

    Input Parameters:
+   g - the global vector
-   l - the local (ghosted) representation

    Notes:
    This routine does not transfer any data; it simply releases the reference
    to the local representation acquired with VecGhostGetLocalForm().

    Level: advanced

.seealso: VecCreateGhost(), VecGhostGetLocalForm(), VecCreateGhostWithArray()
@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecGhostRestoreLocalForm(Vec g,Vec *l)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectDereference((PetscObject)*l);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
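/*
   Example of reading ghost values through the local form (a sketch; gx, nlocal,
   and nghost are hypothetical). Because the local form shares the array of the
   global vector, the ghost entries appear immediately after the owned entries:

       Vec            lx;
       PetscScalar    *a;
       PetscErrorCode ierr;

       ierr = VecGhostGetLocalForm(gx,&lx);CHKERRQ(ierr);
       ierr = VecGetArray(lx,&a);CHKERRQ(ierr);
       (a[0..nlocal-1] are the owned entries; a[nlocal..nlocal+nghost-1] are the ghosts)
       ierr = VecRestoreArray(lx,&a);CHKERRQ(ierr);
       ierr = VecGhostRestoreLocalForm(gx,&lx);CHKERRQ(ierr);
*/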
#undef __FUNCT__
#define __FUNCT__ "VecGhostUpdateBegin"
/*@
   VecGhostUpdateBegin - Begins the vector scatter that updates the vector from
   the local representation to the global representation, or vice versa.

   Collective on Vec

   Input Parameters:
+  g           - the vector (obtained with VecCreateGhost() or VecDuplicate())
.  insertmode  - one of ADD_VALUES or INSERT_VALUES
-  scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE

   Notes:
   Use the following to update the ghost regions with correct values from the owning process
.vb
       VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
       VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
.ve

   Use the following to accumulate the ghost region values onto the owning processors
.vb
       VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
.ve

   To accumulate the ghost region values onto the owning processors and then update
   the ghost regions correctly, call the latter followed by the former, i.e.,
.vb
       VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
       VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
.ve

   Level: advanced

.seealso: VecCreateGhost(), VecGhostUpdateEnd(), VecGhostGetLocalForm(),
          VecGhostRestoreLocalForm(), VecCreateGhostWithArray()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecGhostUpdateBegin(Vec g,InsertMode insertmode,ScatterMode scattermode)
{
  Vec_MPI        *v;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(g,VEC_COOKIE,1);

  v = (Vec_MPI*)g->data;
  if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
  if (!v->localupdate) PetscFunctionReturn(0);

  if (scattermode == SCATTER_REVERSE) {
    ierr = VecScatterBegin(v->localrep,g,insertmode,scattermode,v->localupdate);CHKERRQ(ierr);
  } else {
    ierr = VecScatterBegin(g,v->localrep,insertmode,scattermode,v->localupdate);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
#undef __FUNCT__
#define __FUNCT__ "VecGhostUpdateEnd"
/*@
   VecGhostUpdateEnd - Ends the vector scatter that updates the vector from
   the local representation to the global representation, or vice versa.

   Collective on Vec

   Input Parameters:
+  g           - the vector (obtained with VecCreateGhost() or VecDuplicate())
.  insertmode  - one of ADD_VALUES or INSERT_VALUES
-  scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE

   Notes:
   Use the following to update the ghost regions with correct values from the owning process
.vb
       VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
       VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
.ve

   Use the following to accumulate the ghost region values onto the owning processors
.vb
       VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
.ve

   To accumulate the ghost region values onto the owning processors and then update
   the ghost regions correctly, call the latter followed by the former, i.e.,
.vb
       VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
       VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
.ve

   Level: advanced

.seealso: VecCreateGhost(), VecGhostUpdateBegin(), VecGhostGetLocalForm(),
          VecGhostRestoreLocalForm(), VecCreateGhostWithArray()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecGhostUpdateEnd(Vec g,InsertMode insertmode,ScatterMode scattermode)
{
  Vec_MPI        *v;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(g,VEC_COOKIE,1);

  v = (Vec_MPI*)g->data;
  if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
  if (!v->localupdate) PetscFunctionReturn(0);

  if (scattermode == SCATTER_REVERSE) {
    ierr = VecScatterEnd(v->localrep,g,insertmode,scattermode,v->localupdate);CHKERRQ(ierr);
  } else {
    ierr = VecScatterEnd(g,v->localrep,insertmode,scattermode,v->localupdate);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
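/*
   A common assembly pattern (a sketch; v and lv are hypothetical): each process
   writes contributions, including into ghost slots, through the local form, then
   accumulates them onto the owners and refreshes the ghost copies:

       ierr = VecGhostGetLocalForm(v,&lv);CHKERRQ(ierr);
       ... add entries of lv, including the ghost slots ...
       ierr = VecGhostRestoreLocalForm(v,&lv);CHKERRQ(ierr);
       ierr = VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
       ierr = VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
       ierr = VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
       ierr = VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
*/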
#undef __FUNCT__
#define __FUNCT__ "VecCreateGhostWithArray"
/*@C
   VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space.

   Collective on MPI_Comm

   Input Parameters:
+  comm   - the MPI communicator to use
.  n      - local vector length
.  N      - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost points
.  ghosts - global indices of ghost points (or PETSC_NULL if not needed)
-  array  - the space to store the vector values (must be of length at least n + nghost)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   Level: advanced

   Concepts: vectors^creating with array
   Concepts: vectors^ghosted

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostWithArray(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode ierr;
  Vec_MPI        *w;
  PetscScalar    *larray;
  IS             from,to;

  PetscFunctionBegin;
  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,nghost,array);CHKERRQ(ierr);
  w    = (Vec_MPI*)(*vv)->data;
  /* Create local representation */
  ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localrep);CHKERRQ(ierr);
  ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);

  /*
     Create scatter context for scattering (updating) ghost values
  */
  ierr = ISCreateGeneral(comm,nghost,ghosts,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localupdate);CHKERRQ(ierr);
  ierr = ISDestroy(to);CHKERRQ(ierr);
  ierr = ISDestroy(from);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
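/*
   Sizing example for VecCreateGhostWithArray() (a sketch; the numbers and the
   names "ghosts" and "work" are hypothetical): with n = 2 owned entries and
   nghost = 2 ghost entries, the user array must hold n + nghost = 4 values:

       PetscInt       ghosts[2];       (filled with global indices owned by other processes)
       PetscScalar    work[4];
       Vec            v;
       PetscErrorCode ierr;

       ierr = VecCreateGhostWithArray(PETSC_COMM_WORLD,2,PETSC_DECIDE,2,ghosts,work,&v);CHKERRQ(ierr);
*/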
#undef __FUNCT__
#define __FUNCT__ "VecCreateGhost"
/*@
   VecCreateGhost - Creates a parallel vector with ghost padding on each processor.

   Collective on MPI_Comm

   Input Parameters:
+  comm   - the MPI communicator to use
.  n      - local vector length
.  N      - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhost(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreateGhostWithArray(comm,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecDuplicate_MPI"
PetscErrorCode VecDuplicate_MPI(Vec win,Vec *v)
{
  PetscErrorCode ierr;
  Vec_MPI        *vw,*w = (Vec_MPI*)win->data;
  PetscScalar    *array;

  PetscFunctionBegin;
  ierr = VecCreate(win->comm,v);CHKERRQ(ierr);
  ierr = VecSetSizes(*v,win->n,win->N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*v,w->nghost,0);CHKERRQ(ierr);
  vw   = (Vec_MPI*)(*v)->data;
  ierr = PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps));CHKERRQ(ierr);

  /* save local representation of the parallel vector (and scatter) if it exists */
  if (w->localrep) {
    ierr = VecGetArray(*v,&array);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,win->n+w->nghost,array,&vw->localrep);CHKERRQ(ierr);
    ierr = PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps));CHKERRQ(ierr);
    ierr = VecRestoreArray(*v,&array);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(*v,vw->localrep);CHKERRQ(ierr);
    vw->localupdate = w->localupdate;
    if (vw->localupdate) {
      ierr = PetscObjectReference((PetscObject)vw->localupdate);CHKERRQ(ierr);
    }
  }

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash = win->stash.donotstash;

  ierr = PetscOListDuplicate(win->olist,&(*v)->olist);CHKERRQ(ierr);
  ierr = PetscFListDuplicate(win->qlist,&(*v)->qlist);CHKERRQ(ierr);
  if (win->mapping) {
    (*v)->mapping = win->mapping;
    ierr = PetscObjectReference((PetscObject)win->mapping);CHKERRQ(ierr);
  }
  if (win->bmapping) {
    (*v)->bmapping = win->bmapping;
    ierr = PetscObjectReference((PetscObject)win->bmapping);CHKERRQ(ierr);
  }
  (*v)->bs        = win->bs;
  (*v)->bstash.bs = win->bstash.bs;
  PetscFunctionReturn(0);
}
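/*
   Example use of VecCreateGhost() (a sketch; n, nghost, and ghosts are
   hypothetical). Since VecDuplicate_MPI() above recreates the local
   representation and references the ghost scatter, duplicates of a ghosted
   vector are themselves ghosted:

       Vec            v,w;
       PetscErrorCode ierr;

       ierr = VecCreateGhost(PETSC_COMM_WORLD,n,PETSC_DECIDE,nghost,ghosts,&v);CHKERRQ(ierr);
       ierr = VecDuplicate(v,&w);CHKERRQ(ierr);   (w is also ghosted)
*/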
/* ------------------------------------------------------------------------------------------*/
#undef __FUNCT__
#define __FUNCT__ "VecCreateGhostBlockWithArray"
/*@C
   VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space. Indices in the ghost region are based on blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm   - the MPI communicator to use
.  bs     - block size
.  n      - local vector length
.  N      - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost blocks
.  ghosts - global indices of ghost blocks (or PETSC_NULL if not needed)
-  array  - the space to store the vector values (must be of length at least n + nghost*bs)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (the total local size, not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

   Level: advanced

   Concepts: vectors^creating ghosted
   Concepts: vectors^creating with array

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostWithArray(), VecCreateGhostBlock()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostBlockWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode ierr;
  Vec_MPI        *w;
  PetscScalar    *larray;
  IS             from,to;

  PetscFunctionBegin;
  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,nghost*bs,array);CHKERRQ(ierr);
  ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
  w    = (Vec_MPI*)(*vv)->data;
  /* Create local representation */
  ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,n+bs*nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = VecSetBlockSize(w->localrep,bs);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localrep);CHKERRQ(ierr);
  ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);

  /*
     Create scatter context for scattering (updating) ghost values
  */
  ierr = ISCreateBlock(comm,bs,nghost,ghosts,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localupdate);CHKERRQ(ierr);
  ierr = ISDestroy(to);CHKERRQ(ierr);
  ierr = ISDestroy(from);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
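/*
   Sizing example for the blocked variant (a sketch; the numbers and the names
   "ghosts", "work", and "v" are hypothetical): with bs = 2, n = 6 owned entries
   (3 blocks), and nghost = 2 ghost blocks, the user array must hold
   n + bs*nghost = 10 values, and ghosts[] contains 2 block (not element) indices:

       ierr = VecCreateGhostBlockWithArray(PETSC_COMM_WORLD,2,6,PETSC_DECIDE,2,ghosts,work,&v);CHKERRQ(ierr);
*/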
#undef __FUNCT__
#define __FUNCT__ "VecCreateGhostBlock"
/*@
   VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
   The indexing of the ghost points is done with blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm   - the MPI communicator to use
.  bs     - the block size
.  n      - local vector length
.  N      - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost blocks
-  ghosts - global indices of ghost blocks

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (the total local size, not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostBlock(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*
   These introduce a ghosted vector where the ghosting is determined by the call to
   VecSetLocalToGlobalMapping()
*/

#undef __FUNCT__
#define __FUNCT__ "VecSetLocalToGlobalMapping_FETI"
PetscErrorCode VecSetLocalToGlobalMapping_FETI(Vec vv,ISLocalToGlobalMapping map)
{
  PetscErrorCode ierr;
  Vec_MPI        *v = (Vec_MPI*)vv->data;

  PetscFunctionBegin;
  v->nghost = map->n - vv->n;

  /* we need to enlarge the array space that was allocated when the vector was created */
  ierr = PetscFree(v->array_allocated);CHKERRQ(ierr);
  ierr = PetscMalloc(map->n*sizeof(PetscScalar),&v->array_allocated);CHKERRQ(ierr);
  v->array = v->array_allocated;

  /* Create local representation */
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,map->n,v->array,&v->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(vv,v->localrep);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecSetValuesLocal_FETI"
PetscErrorCode VecSetValuesLocal_FETI(Vec vv,PetscInt n,const PetscInt *ix,const PetscScalar *values,InsertMode mode)
{
  PetscErrorCode ierr;
  Vec_MPI        *v = (Vec_MPI*)vv->data;

  PetscFunctionBegin;
  ierr = VecSetValues(v->localrep,n,ix,values,mode);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "VecCreate_FETI"
PetscErrorCode PETSCVEC_DLLEXPORT VecCreate_FETI(Vec vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecSetType(vv,VECMPI);CHKERRQ(ierr);

  /* overwrite the functions to handle setting values locally */
  vv->ops->setlocaltoglobalmapping = VecSetLocalToGlobalMapping_FETI;
  vv->ops->setvalueslocal          = VecSetValuesLocal_FETI;
  vv->ops->assemblybegin           = 0;
  vv->ops->assemblyend             = 0;
  vv->ops->setvaluesblocked        = 0;

  PetscFunctionReturn(0);
}
EXTERN_C_END