#define PETSCVEC_DLL
/*
     This file contains routines for Parallel vector operations.
 */
#include "../src/vec/vec/impls/mpi/pvecimpl.h"   /*I  "petscvec.h"   I*/

#if 0
#undef __FUNCT__
#define __FUNCT__ "VecPublish_MPI"
static PetscErrorCode VecPublish_MPI(PetscObject obj)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
#endif

#undef __FUNCT__
#define __FUNCT__ "VecDot_MPI"
PetscErrorCode VecDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecDot_Seq(xin,yin,&work);CHKERRQ(ierr);
  ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,((PetscObject)xin)->comm);CHKERRQ(ierr);
  *z   = sum;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecTDot_MPI"
PetscErrorCode VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecTDot_Seq(xin,yin,&work);CHKERRQ(ierr);
  ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,((PetscObject)xin)->comm);CHKERRQ(ierr);
  *z   = sum;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecSetOption_MPI"
PetscErrorCode VecSetOption_MPI(Vec v,VecOption op,PetscTruth flag)
{
  PetscFunctionBegin;
  if (op == VEC_IGNORE_OFF_PROC_ENTRIES) {
    v->stash.donotstash = flag;
  } else if (op == VEC_IGNORE_NEGATIVE_INDICES) {
    v->stash.ignorenegidx = flag;
  }
  PetscFunctionReturn(0);
}

EXTERN PetscErrorCode VecDuplicate_MPI(Vec,Vec *);
EXTERN_C_BEGIN
EXTERN PetscErrorCode VecView_MPI_Draw(Vec,PetscViewer);
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "VecPlaceArray_MPI"
PetscErrorCode VecPlaceArray_MPI(Vec vin,const PetscScalar *a)
{
  PetscErrorCode ierr;
  Vec_MPI        *v = (Vec_MPI *)vin->data;

  PetscFunctionBegin;
  if (v->unplacedarray) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
  v->unplacedarray = v->array;  /* save previous array so reset can bring it back */
  v->array         = (PetscScalar *)a;
  if (v->localrep) {
    ierr = VecPlaceArray(v->localrep,a);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecResetArray_MPI"
PetscErrorCode VecResetArray_MPI(Vec vin)
{
  Vec_MPI        *v = (Vec_MPI *)vin->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  v->array         = v->unplacedarray;
  v->unplacedarray = 0;
  if (v->localrep) {
    ierr = VecResetArray(v->localrep);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

EXTERN PetscErrorCode VecGetValues_MPI(Vec,PetscInt,const PetscInt [],PetscScalar []);
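
/*
   The function table below defines the parallel ("mpi") vector type.  Operations that
   touch only locally owned entries (scale, copy, axpy, pointwise products, ...) reuse
   the sequential VecXXX_Seq kernels directly; operations that need a global result
   (dot products, norms, max/min) or off-process data movement (setting values, assembly)
   have VecXXX_MPI implementations that add the required MPI reduction or stash/scatter
   step on top of the local kernel.
*/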
static struct _VecOps DvOps = { VecDuplicate_MPI, /* 1 */
            VecDuplicateVecs_Default,
            VecDestroyVecs_Default,
            VecDot_MPI,
            VecMDot_MPI,
            VecNorm_MPI,
            VecTDot_MPI,
            VecMTDot_MPI,
            VecScale_Seq,
            VecCopy_Seq, /* 10 */
            VecSet_Seq,
            VecSwap_Seq,
            VecAXPY_Seq,
            VecAXPBY_Seq,
            VecMAXPY_Seq,
            VecAYPX_Seq,
            VecWAXPY_Seq,
            VecAXPBYPCZ_Seq,
            VecPointwiseMult_Seq,
            VecPointwiseDivide_Seq,
            VecSetValues_MPI, /* 20 */
            VecAssemblyBegin_MPI,
            VecAssemblyEnd_MPI,
            VecGetArray_Seq,
            VecGetSize_MPI,
            VecGetSize_Seq,
            VecRestoreArray_Seq,
            VecMax_MPI,
            VecMin_MPI,
            VecSetRandom_Seq,
            VecSetOption_MPI,
            VecSetValuesBlocked_MPI,
            VecDestroy_MPI,
            VecView_MPI,
            VecPlaceArray_MPI,
            VecReplaceArray_Seq,
            VecDot_Seq,
            VecTDot_Seq,
            VecNorm_Seq,
            VecMDot_Seq,
            VecMTDot_Seq,
            VecLoad_Default,
            0,                        /* VecLoadIntoVectorNative */
            VecReciprocal_Default,
            0,                        /* VecViewNative... */
            VecConjugate_Seq,
            0,
            0,
            VecResetArray_MPI,
            0,
            VecMaxPointwiseDivide_Seq,
            VecPointwiseMax_Seq,
            VecPointwiseMaxAbs_Seq,
            VecPointwiseMin_Seq,
            VecGetValues_MPI,
            0,
            0,
            0,
            0,
            0,
            0,
            VecStrideGather_Default,
            VecStrideScatter_Default
};

#undef __FUNCT__
#define __FUNCT__ "VecCreate_MPI_Private"
/*
    VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
    VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
    VecDuplicate_MPI(), VecCreateGhostWithArray(), and VecDuplicate_Shared()

    If alloc is true and array is PETSC_NULL then this routine allocates the space, otherwise
    no space is allocated.
*/
PetscErrorCode VecCreate_MPI_Private(Vec v,PetscTruth alloc,PetscInt nghost,const PetscScalar array[])
{
  Vec_MPI        *s;
  PetscErrorCode ierr;

  PetscFunctionBegin;

  ierr           = PetscNewLog(v,Vec_MPI,&s);CHKERRQ(ierr);
  v->data        = (void*)s;
  ierr           = PetscMemcpy(v->ops,&DvOps,sizeof(DvOps));CHKERRQ(ierr);
  s->nghost      = nghost;
  v->mapping     = 0;
  v->bmapping    = 0;
  v->petscnative = PETSC_TRUE;

  if (v->map->bs == -1) v->map->bs = 1;
  ierr = PetscLayoutSetUp(v->map);CHKERRQ(ierr);
  s->array           = (PetscScalar *)array;
  s->array_allocated = 0;
  if (alloc && !array) {
    PetscInt n = v->map->n+nghost;
    ierr = PetscMalloc(n*sizeof(PetscScalar),&s->array);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory(v,n*sizeof(PetscScalar));CHKERRQ(ierr);
    ierr = PetscMemzero(s->array,v->map->n*sizeof(PetscScalar));CHKERRQ(ierr);
    s->array_allocated = s->array;
  }

  /* By default parallel vectors do not have local representation */
  s->localrep    = 0;
  s->localupdate = 0;

  v->stash.insertmode = NOT_SET_VALUES;
  /* create the stashes. The block-size for bstash is set later when
     VecSetValuesBlocked is called.
  */
  ierr = VecStashCreate_Private(((PetscObject)v)->comm,1,&v->stash);CHKERRQ(ierr);
  ierr = VecStashCreate_Private(((PetscObject)v)->comm,v->map->bs,&v->bstash);CHKERRQ(ierr);

#if defined(PETSC_HAVE_MATLAB_ENGINE)
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEnginePut_C","VecMatlabEnginePut_Default",VecMatlabEnginePut_Default);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEngineGet_C","VecMatlabEngineGet_Default",VecMatlabEngineGet_Default);CHKERRQ(ierr);
#endif
  ierr = PetscObjectChangeTypeName((PetscObject)v,VECMPI);CHKERRQ(ierr);
  ierr = PetscPublishAll(v);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*MC
   VECMPI - VECMPI = "mpi" - The basic parallel vector

   Options Database Keys:
.  -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions()

   Level: beginner

.seealso: VecCreate(), VecSetType(), VecSetFromOptions(), VecCreateMPIWithArray(), VECMPI, VecType, VecCreateMPI()
M*/
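
/*
   A minimal usage sketch (illustrative, not part of the library source): VECMPI vectors
   are normally obtained through the generic creation interface rather than by calling
   VecCreate_MPI() directly, for example

     Vec      x;
     PetscInt N = 100;                                      (illustrative global size)
     ierr = VecCreate(PETSC_COMM_WORLD,&x);CHKERRQ(ierr);
     ierr = VecSetSizes(x,PETSC_DECIDE,N);CHKERRQ(ierr);
     ierr = VecSetFromOptions(x);CHKERRQ(ierr);             (-vec_type mpi selects this type)
     ...
     ierr = VecDestroy(x);CHKERRQ(ierr);
*/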

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "VecCreate_MPI"
PetscErrorCode PETSCVEC_DLLEXPORT VecCreate_MPI(Vec vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "VecCreateMPIWithArray"
/*@C
   VecCreateMPIWithArray - Creates a parallel, array-style vector,
   where the user provides the array space to store the vector values.

   Collective on MPI_Comm

   Input Parameters:
+  comm  - the MPI communicator to use
.  n     - local vector length, cannot be PETSC_DECIDE
.  N     - global vector length (or PETSC_DECIDE to have it calculated)
-  array - the user provided array to store the vector values

   Output Parameter:
.  vv - the vector

   Notes:
   Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
   same type as an existing vector.

   If the user-provided array is PETSC_NULL, then VecPlaceArray() can be used
   at a later stage to SET the array for storing the vector values.

   PETSc does NOT free the array when the vector is destroyed via VecDestroy().
   The user should not free the array until the vector is destroyed.

   Level: intermediate

   Concepts: vectors^creating with array

.seealso: VecCreateSeqWithArray(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(),
          VecCreateMPI(), VecCreateGhostWithArray(), VecPlaceArray()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecCreateMPIWithArray(MPI_Comm comm,PetscInt n,PetscInt N,const PetscScalar array[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (n == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_FALSE,0,array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
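
/*
   A small usage sketch for VecCreateMPIWithArray() (illustrative): the caller owns the
   storage, which must hold at least n local entries and must outlive the vector, e.g.

     PetscScalar *vals;
     PetscInt     n = 10;                                    (illustrative local size)
     Vec          x;
     ierr = PetscMalloc(n*sizeof(PetscScalar),&vals);CHKERRQ(ierr);
     ierr = VecCreateMPIWithArray(PETSC_COMM_WORLD,n,PETSC_DECIDE,vals,&x);CHKERRQ(ierr);
     ...
     ierr = VecDestroy(x);CHKERRQ(ierr);                     (does not free vals)
     ierr = PetscFree(vals);CHKERRQ(ierr);
*/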

#undef __FUNCT__
#define __FUNCT__ "VecGhostStateSync_Private"
/*
  This is used in VecGhostGetLocalForm and VecGhostRestoreLocalForm to ensure
  that the state is updated if either vector has changed since the last time
  one of these functions was called.  It could apply to any PetscObject, but
  VecGhost is quite different from other objects in that two separate vectors
  look at the same memory.

  In principle, we could only propagate state to the local vector on
  GetLocalForm and to the global vector on RestoreLocalForm, but this version is
  more conservative (i.e. robust against misuse) and simpler.

  Note that this function is correct and changes nothing if both arguments are
  the same, which is the case in serial.
*/
static PetscErrorCode VecGhostStateSync_Private(Vec g,Vec l)
{
  PetscErrorCode ierr;
  PetscInt       gstate,lstate;

  PetscFunctionBegin;
  ierr = PetscObjectStateQuery((PetscObject)g,&gstate);CHKERRQ(ierr);
  ierr = PetscObjectStateQuery((PetscObject)l,&lstate);CHKERRQ(ierr);
  ierr = PetscObjectSetState((PetscObject)g,PetscMax(gstate,lstate));CHKERRQ(ierr);
  ierr = PetscObjectSetState((PetscObject)l,PetscMax(gstate,lstate));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecGhostGetLocalForm"
/*@
   VecGhostGetLocalForm - Obtains the local ghosted representation of
   a parallel vector created with VecCreateGhost().

   Not Collective

   Input Parameter:
.  g - the global vector.  Vector must have been obtained with either
       VecCreateGhost(), VecCreateGhostWithArray() or VecCreateSeq().

   Output Parameter:
.  l - the local (ghosted) representation

   Notes:
   This routine does not actually update the ghost values, but rather it
   returns a sequential vector that includes the locations for the ghost
   values and their current values.  The returned vector and the original
   vector passed in share the same array that contains the actual vector data.

   One should call VecGhostRestoreLocalForm() or VecDestroy() once one is
   finished using the object.

   Level: advanced

   Concepts: vectors^ghost point access

.seealso: VecCreateGhost(), VecGhostRestoreLocalForm(), VecCreateGhostWithArray()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecGhostGetLocalForm(Vec g,Vec *l)
{
  PetscErrorCode ierr;
  PetscTruth     isseq,ismpi;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(g,VEC_CLASSID,1);
  PetscValidPointer(l,2);

  ierr = PetscTypeCompare((PetscObject)g,VECSEQ,&isseq);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)g,VECMPI,&ismpi);CHKERRQ(ierr);
  if (ismpi) {
    Vec_MPI *v = (Vec_MPI*)g->data;
    if (!v->localrep) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
    *l = v->localrep;
  } else if (isseq) {
    *l = g;
  } else {
    SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Vector type %s does not have local representation",((PetscObject)g)->type_name);
  }
  ierr = VecGhostStateSync_Private(g,*l);CHKERRQ(ierr);
  ierr = PetscObjectReference((PetscObject)*l);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecGhostRestoreLocalForm"
/*@
   VecGhostRestoreLocalForm - Restores the local ghosted representation of
   a parallel vector obtained with VecGhostGetLocalForm().

   Not Collective

   Input Parameters:
+  g - the global vector
-  l - the local (ghosted) representation

   Notes:
   This routine does not actually update the ghost values, but rather it
   returns a sequential vector that includes the locations for the ghost values
   and their current values.
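
   A typical access pattern is sketched below (the variable names are illustrative); the
   array of the local form contains the owned entries followed by the ghost entries.
.vb
   Vec         lx;
   PetscScalar *a;
   VecGhostGetLocalForm(gx,&lx);
   VecGetArray(lx,&a);
       ... use a[0] through a[nlocal+nghost-1] ...
   VecRestoreArray(lx,&a);
   VecGhostRestoreLocalForm(gx,&lx);
.ve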

   Level: advanced

.seealso: VecCreateGhost(), VecGhostGetLocalForm(), VecCreateGhostWithArray()
@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecGhostRestoreLocalForm(Vec g,Vec *l)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecGhostStateSync_Private(g,*l);CHKERRQ(ierr);
  ierr = PetscObjectDereference((PetscObject)*l);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecGhostUpdateBegin"
/*@
   VecGhostUpdateBegin - Begins the vector scatter to update the vector from
   local representation to global or global representation to local.

   Collective on Vec

   Input Parameters:
+  g - the vector (obtained with VecCreateGhost() or VecDuplicate())
.  insertmode - one of ADD_VALUES or INSERT_VALUES
-  scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE

   Notes:
   Use the following to update the ghost regions with correct values from the owning process
.vb
       VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
       VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
.ve

   Use the following to accumulate the ghost region values onto the owning processors
.vb
       VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
.ve

   To accumulate the ghost region values onto the owning processors and then update
   the ghost regions correctly, call the latter followed by the former, i.e.,
.vb
       VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
       VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
.ve

   Level: advanced

.seealso: VecCreateGhost(), VecGhostUpdateEnd(), VecGhostGetLocalForm(),
          VecGhostRestoreLocalForm(), VecCreateGhostWithArray()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecGhostUpdateBegin(Vec g,InsertMode insertmode,ScatterMode scattermode)
{
  Vec_MPI        *v;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(g,VEC_CLASSID,1);

  v = (Vec_MPI*)g->data;
  if (!v->localrep) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
  if (!v->localupdate) PetscFunctionReturn(0);

  if (scattermode == SCATTER_REVERSE) {
    ierr = VecScatterBegin(v->localupdate,v->localrep,g,insertmode,scattermode);CHKERRQ(ierr);
  } else {
    ierr = VecScatterBegin(v->localupdate,g,v->localrep,insertmode,scattermode);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
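
/*
   Note on the scatter direction used above and in VecGhostUpdateEnd(): the localupdate
   scatter context is built in VecCreateGhostWithArray() to map owned entries of the
   global vector into the ghost slots of the local form.  Thus SCATTER_FORWARD moves data
   from the global vector into the local form (refreshing the ghost values), while
   SCATTER_REVERSE moves the ghost slots of the local form back onto their owning
   processes, which is why the two vector arguments are swapped in the two branches.
*/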

#undef __FUNCT__
#define __FUNCT__ "VecGhostUpdateEnd"
/*@
   VecGhostUpdateEnd - End the vector scatter to update the vector from
   local representation to global or global representation to local.

   Collective on Vec

   Input Parameters:
+  g - the vector (obtained with VecCreateGhost() or VecDuplicate())
.  insertmode - one of ADD_VALUES or INSERT_VALUES
-  scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE

   Notes:

   Use the following to update the ghost regions with correct values from the owning process
.vb
       VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
       VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
.ve

   Use the following to accumulate the ghost region values onto the owning processors
.vb
       VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
.ve

   To accumulate the ghost region values onto the owning processors and then update
   the ghost regions correctly, call the latter followed by the former, i.e.,
.vb
       VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
       VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
.ve

   Level: advanced

.seealso: VecCreateGhost(), VecGhostUpdateBegin(), VecGhostGetLocalForm(),
          VecGhostRestoreLocalForm(), VecCreateGhostWithArray()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecGhostUpdateEnd(Vec g,InsertMode insertmode,ScatterMode scattermode)
{
  Vec_MPI        *v;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(g,VEC_CLASSID,1);

  v = (Vec_MPI*)g->data;
  if (!v->localrep) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
  if (!v->localupdate) PetscFunctionReturn(0);

  if (scattermode == SCATTER_REVERSE) {
    ierr = VecScatterEnd(v->localupdate,v->localrep,g,insertmode,scattermode);CHKERRQ(ierr);
  } else {
    ierr = VecScatterEnd(v->localupdate,g,v->localrep,insertmode,scattermode);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecCreateGhostWithArray"
/*@C
   VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space.

   Collective on MPI_Comm

   Input Parameters:
+  comm   - the MPI communicator to use
.  n      - local vector length
.  N      - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost points
.  ghosts - global indices of ghost points (or PETSC_NULL if not needed)
-  array  - the space to store the vector values (as long as n + nghost)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   Level: advanced

   Concepts: vectors^creating with array
   Concepts: vectors^ghosted

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostWithArray(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode         ierr;
  Vec_MPI                *w;
  PetscScalar            *larray;
  IS                     from,to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart,i,*indices;

  PetscFunctionBegin;
  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost,array);CHKERRQ(ierr);
  w    = (Vec_MPI *)(*vv)->data;
  /* Create local representation */
  ierr = VecGetArrayPrivate(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localrep);CHKERRQ(ierr);
  ierr = VecRestoreArrayPrivate(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  ierr = ISCreateGeneral(comm,nghost,ghosts,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localupdate);CHKERRQ(ierr);
  ierr = ISDestroy(to);CHKERRQ(ierr);
  ierr = ISDestroy(from);CHKERRQ(ierr);

  /* set local to global mapping for ghosted vector */
  ierr = PetscMalloc((n+nghost)*sizeof(PetscInt),&indices);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(*vv,&rstart,PETSC_NULL);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    indices[i] = rstart + i;
  }
  for (i=0; i<nghost; i++) {
    indices[n+i] = ghosts[i];
  }
  ierr = ISLocalToGlobalMappingCreate(comm,n+nghost,indices,&ltog);CHKERRQ(ierr);
  ierr = PetscFree(indices);CHKERRQ(ierr);
  ierr = VecSetLocalToGlobalMapping(*vv,ltog);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(ltog);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecCreateGhost"
/*@
   VecCreateGhost - Creates a parallel vector with ghost padding on each processor.

   Collective on MPI_Comm

   Input Parameters:
+  comm   - the MPI communicator to use
.  n      - local vector length
.  N      - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.
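
   A typical creation sequence is sketched below (the sizes and ghost indices are
   illustrative); ghosts[] lists global indices of entries owned by other processes
   that this process needs local access to.
.vb
   PetscInt ghosts[2] = {0,42};
   VecCreateGhost(PETSC_COMM_WORLD,nlocal,PETSC_DECIDE,2,ghosts,&x);
   ... set values in x and assemble ...
   VecGhostUpdateBegin(x,INSERT_VALUES,SCATTER_FORWARD);
   VecGhostUpdateEnd(x,INSERT_VALUES,SCATTER_FORWARD);
.ve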

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhost(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreateGhostWithArray(comm,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecDuplicate_MPI"
PetscErrorCode VecDuplicate_MPI(Vec win,Vec *v)
{
  PetscErrorCode ierr;
  Vec_MPI        *vw,*w = (Vec_MPI *)win->data;
  PetscScalar    *array;

  PetscFunctionBegin;
  ierr = VecCreate(((PetscObject)win)->comm,v);CHKERRQ(ierr);

  /* use the map that exists already in win */
  ierr = PetscLayoutDestroy((*v)->map);CHKERRQ(ierr);
  (*v)->map = win->map;
  win->map->refcnt++;

  ierr = VecCreate_MPI_Private(*v,PETSC_TRUE,w->nghost,0);CHKERRQ(ierr);
  vw   = (Vec_MPI *)(*v)->data;
  ierr = PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps));CHKERRQ(ierr);

  /* save local representation of the parallel vector (and scatter) if it exists */
  if (w->localrep) {
    ierr = VecGetArrayPrivate(*v,&array);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,win->map->n+w->nghost,array,&vw->localrep);CHKERRQ(ierr);
    ierr = PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps));CHKERRQ(ierr);
    ierr = VecRestoreArrayPrivate(*v,&array);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(*v,vw->localrep);CHKERRQ(ierr);
    vw->localupdate = w->localupdate;
    if (vw->localupdate) {
      ierr = PetscObjectReference((PetscObject)vw->localupdate);CHKERRQ(ierr);
    }
  }

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash   = win->stash.donotstash;
  (*v)->stash.ignorenegidx = win->stash.ignorenegidx;

  ierr = PetscOListDuplicate(((PetscObject)win)->olist,&((PetscObject)(*v))->olist);CHKERRQ(ierr);
  ierr = PetscFListDuplicate(((PetscObject)win)->qlist,&((PetscObject)(*v))->qlist);CHKERRQ(ierr);
  if (win->mapping) {
    ierr = PetscObjectReference((PetscObject)win->mapping);CHKERRQ(ierr);
    (*v)->mapping = win->mapping;
  }
  if (win->bmapping) {
    ierr = PetscObjectReference((PetscObject)win->bmapping);CHKERRQ(ierr);
    (*v)->bmapping = win->bmapping;
  }
  (*v)->map->bs   = win->map->bs;
  (*v)->bstash.bs = win->bstash.bs;

  PetscFunctionReturn(0);
}

/* ------------------------------------------------------------------------------------------*/
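
/*
   Note on the "block" ghost routines below: ghosts[] contains global BLOCK indices, not
   entry indices.  For example, with bs = 2 a ghost block index of 7 refers to global
   vector entries 14 and 15, and the ghost portion of the local form holds bs*nghost entries.
*/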
#undef __FUNCT__
#define __FUNCT__ "VecCreateGhostBlockWithArray"
/*@C
   VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space.  Indices in the ghost region are based on blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm   - the MPI communicator to use
.  bs     - block size
.  n      - local vector length
.  N      - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost blocks
.  ghosts - global indices of ghost blocks (or PETSC_NULL if not needed)
-  array  - the space to store the vector values (as long as n + nghost*bs)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (total local size, not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

   Level: advanced

   Concepts: vectors^creating ghosted
   Concepts: vectors^creating with array

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostWithArray(), VecCreateGhostBlocked()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostBlockWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode         ierr;
  Vec_MPI                *w;
  PetscScalar            *larray;
  IS                     from,to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart,i,nb,*indices;

  PetscFunctionBegin;
  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  if (n % bs)                 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local size must be a multiple of block size");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost*bs,array);CHKERRQ(ierr);
  ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
  w    = (Vec_MPI *)(*vv)->data;
  /* Create local representation */
  ierr = VecGetArrayPrivate(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,n+bs*nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = VecSetBlockSize(w->localrep,bs);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localrep);CHKERRQ(ierr);
  ierr = VecRestoreArrayPrivate(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  ierr = ISCreateBlock(comm,bs,nghost,ghosts,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localupdate);CHKERRQ(ierr);
  ierr = ISDestroy(to);CHKERRQ(ierr);
  ierr = ISDestroy(from);CHKERRQ(ierr);

  /* set local to global mapping for ghosted vector */
  nb   = n/bs;
  ierr = PetscMalloc((nb+nghost)*sizeof(PetscInt),&indices);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(*vv,&rstart,PETSC_NULL);CHKERRQ(ierr);
  for (i=0; i<nb; i++) {
    indices[i] = rstart + i*bs;
  }
  for (i=0; i<nghost; i++) {
    indices[nb+i] = ghosts[i];
  }
  ierr = ISLocalToGlobalMappingCreate(comm,nb+nghost,indices,&ltog);CHKERRQ(ierr);
  ierr = PetscFree(indices);CHKERRQ(ierr);
  ierr = VecSetLocalToGlobalMappingBlock(*vv,ltog);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(ltog);CHKERRQ(ierr);

  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecCreateGhostBlock"
/*@
   VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
        The indexing of the ghost points is done with blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm   - the MPI communicator to use
.  bs     - the block size
.  n      - local vector length
.  N      - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost blocks
-  ghosts - global indices of ghost blocks

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (total local size, not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostBlock(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*
   These introduce a ghosted vector where the ghosting is determined by the call to
   VecSetLocalToGlobalMapping()
*/

#undef __FUNCT__
#define __FUNCT__ "VecSetLocalToGlobalMapping_FETI"
PetscErrorCode VecSetLocalToGlobalMapping_FETI(Vec vv,ISLocalToGlobalMapping map)
{
  PetscErrorCode ierr;
  Vec_MPI        *v = (Vec_MPI *)vv->data;

  PetscFunctionBegin;
  v->nghost = map->n - vv->map->n;

  /* we need to enlarge the array space that was allocated when the vector was created */
  ierr = PetscFree(v->array_allocated);CHKERRQ(ierr);
  ierr = PetscMalloc(map->n*sizeof(PetscScalar),&v->array_allocated);CHKERRQ(ierr);
  v->array = v->array_allocated;

  /* Create local representation */
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,map->n,v->array,&v->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(vv,v->localrep);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "VecSetValuesLocal_FETI"
PetscErrorCode VecSetValuesLocal_FETI(Vec vv,PetscInt n,const PetscInt *ix,const PetscScalar *values,InsertMode mode)
{
  PetscErrorCode ierr;
  Vec_MPI        *v = (Vec_MPI *)vv->data;

  PetscFunctionBegin;
  ierr = VecSetValues(v->localrep,n,ix,values,mode);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "VecCreate_FETI"
PetscErrorCode PETSCVEC_DLLEXPORT VecCreate_FETI(Vec vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecSetType(vv,VECMPI);CHKERRQ(ierr);

  /* overwrite the functions to handle setting values locally */
  vv->ops->setlocaltoglobalmapping = VecSetLocalToGlobalMapping_FETI;
  vv->ops->setvalueslocal          = VecSetValuesLocal_FETI;
  vv->ops->assemblybegin           = 0;
  vv->ops->assemblyend             = 0;
  vv->ops->setvaluesblocked        = 0;

  PetscFunctionReturn(0);
}
EXTERN_C_END