#define PETSCVEC_DLL
/*
   This file contains routines for parallel vector operations.
 */
#include "../src/vec/vec/impls/mpi/pvecimpl.h"   /*I  "petscvec.h"   I*/

#if 0
#undef __FUNCT__
#define __FUNCT__ "VecPublish_MPI"
static PetscErrorCode VecPublish_MPI(PetscObject obj)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
#endif

#undef __FUNCT__
#define __FUNCT__ "VecDot_MPI"
PetscErrorCode VecDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecDot_Seq(xin,yin,&work);CHKERRQ(ierr);
  ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,((PetscObject)xin)->comm);CHKERRQ(ierr);
  *z   = sum;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecTDot_MPI"
PetscErrorCode VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecTDot_Seq(xin,yin,&work);CHKERRQ(ierr);
  ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,((PetscObject)xin)->comm);CHKERRQ(ierr);
  *z   = sum;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecSetOption_MPI"
PetscErrorCode VecSetOption_MPI(Vec v,VecOption op,PetscTruth flag)
{
  PetscFunctionBegin;
  if (op == VEC_IGNORE_OFF_PROC_ENTRIES) {
    v->stash.donotstash = flag;
  } else if (op == VEC_IGNORE_NEGATIVE_INDICES) {
    v->stash.ignorenegidx = flag;
  }
  PetscFunctionReturn(0);
}

EXTERN PetscErrorCode VecDuplicate_MPI(Vec,Vec *);
EXTERN_C_BEGIN
EXTERN PetscErrorCode VecView_MPI_Draw(Vec,PetscViewer);
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "VecPlaceArray_MPI"
PetscErrorCode VecPlaceArray_MPI(Vec vin,const PetscScalar *a)
{
  PetscErrorCode ierr;
  Vec_MPI        *v = (Vec_MPI*)vin->data;

  PetscFunctionBegin;
  if (v->unplacedarray) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
  v->unplacedarray = v->array;  /* save previous array so reset can bring it back */
  v->array         = (PetscScalar*)a;
  if (v->localrep) {
    ierr = VecPlaceArray(v->localrep,a);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecResetArray_MPI"
PetscErrorCode VecResetArray_MPI(Vec vin)
{
  Vec_MPI        *v = (Vec_MPI*)vin->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  v->array         = v->unplacedarray;
  v->unplacedarray = 0;
  if (v->localrep) {
    ierr = VecResetArray(v->localrep);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
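/*
   Example: a minimal sketch (not part of this file) of how the place/reset
   pair above is reached from user code via VecPlaceArray()/VecResetArray();
   error checking is elided and the sizes are illustrative.

     PetscInt    n = 10;
     PetscScalar *userbuf;
     Vec         x;
     PetscMalloc(n*sizeof(PetscScalar),&userbuf);
     VecCreateMPI(PETSC_COMM_WORLD,n,PETSC_DECIDE,&x);
     VecPlaceArray(x,userbuf);        the vector now computes directly into userbuf
     VecSet(x,2.0);
     VecResetArray(x);                restores the array the vector was created with
     VecDestroy(x);
     PetscFree(userbuf);
*/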
EXTERN PetscErrorCode VecLoad_Binary(PetscViewer, const VecType, Vec*);
EXTERN PetscErrorCode VecGetValues_MPI(Vec,PetscInt,const PetscInt [],PetscScalar []);

static struct _VecOps DvOps = { VecDuplicate_MPI,      /* 1 */
                                VecDuplicateVecs_Default,
                                VecDestroyVecs_Default,
                                VecDot_MPI,
                                VecMDot_MPI,
                                VecNorm_MPI,
                                VecTDot_MPI,
                                VecMTDot_MPI,
                                VecScale_Seq,
                                VecCopy_Seq,           /* 10 */
                                VecSet_Seq,
                                VecSwap_Seq,
                                VecAXPY_Seq,
                                VecAXPBY_Seq,
                                VecMAXPY_Seq,
                                VecAYPX_Seq,
                                VecWAXPY_Seq,
                                VecAXPBYPCZ_Seq,
                                VecPointwiseMult_Seq,
                                VecPointwiseDivide_Seq,
                                VecSetValues_MPI,      /* 20 */
                                VecAssemblyBegin_MPI,
                                VecAssemblyEnd_MPI,
                                VecGetArray_Seq,
                                VecGetSize_MPI,
                                VecGetSize_Seq,
                                VecRestoreArray_Seq,
                                VecMax_MPI,
                                VecMin_MPI,
                                VecSetRandom_Seq,
                                VecSetOption_MPI,
                                VecSetValuesBlocked_MPI,
                                VecDestroy_MPI,
                                VecView_MPI,
                                VecPlaceArray_MPI,
                                VecReplaceArray_Seq,
                                VecDot_Seq,
                                VecTDot_Seq,
                                VecNorm_Seq,
                                VecMDot_Seq,
                                VecMTDot_Seq,
                                VecLoadIntoVector_Default,
                                0, /* VecLoadIntoVectorNative */
                                VecReciprocal_Default,
                                0, /* VecViewNative... */
                                VecConjugate_Seq,
                                0,
                                0,
                                VecResetArray_MPI,
                                0,
                                VecMaxPointwiseDivide_Seq,
                                VecLoad_Binary,
                                VecPointwiseMax_Seq,
                                VecPointwiseMaxAbs_Seq,
                                VecPointwiseMin_Seq,
                                VecGetValues_MPI};

#undef __FUNCT__
#define __FUNCT__ "VecCreate_MPI_Private"
/*
   VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
   VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
   VecCreateGhostWithArray(), VecDuplicate_MPI(), and VecDuplicate_Shared()

   If alloc is true and array is PETSC_NULL then this routine allocates the space, otherwise
   no space is allocated.
 */
PetscErrorCode VecCreate_MPI_Private(Vec v,PetscTruth alloc,PetscInt nghost,const PetscScalar array[])
{
  Vec_MPI        *s;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr           = PetscNewLog(v,Vec_MPI,&s);CHKERRQ(ierr);
  v->data        = (void*)s;
  ierr           = PetscMemcpy(v->ops,&DvOps,sizeof(DvOps));CHKERRQ(ierr);
  s->nghost      = nghost;
  v->mapping     = 0;
  v->bmapping    = 0;
  v->petscnative = PETSC_TRUE;

  if (v->map->bs == -1) v->map->bs = 1;
  ierr = PetscLayoutSetUp(v->map);CHKERRQ(ierr);
  s->array           = (PetscScalar*)array;
  s->array_allocated = 0;
  if (alloc && !array) {
    PetscInt n = v->map->n + nghost;
    ierr = PetscMalloc(n*sizeof(PetscScalar),&s->array);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory(v,n*sizeof(PetscScalar));CHKERRQ(ierr);
    ierr = PetscMemzero(s->array,v->map->n*sizeof(PetscScalar));CHKERRQ(ierr);
    s->array_allocated = s->array;
  }

  /* By default parallel vectors do not have a local representation */
  s->localrep    = 0;
  s->localupdate = 0;

  v->stash.insertmode = NOT_SET_VALUES;
  /* create the stashes. The block size for the bstash is set later when
     VecSetValuesBlocked is called.
  */
  ierr = VecStashCreate_Private(((PetscObject)v)->comm,1,&v->stash);CHKERRQ(ierr);
  ierr = VecStashCreate_Private(((PetscObject)v)->comm,v->map->bs,&v->bstash);CHKERRQ(ierr);

#if defined(PETSC_HAVE_MATLAB_ENGINE)
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEnginePut_C","VecMatlabEnginePut_Default",VecMatlabEnginePut_Default);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEngineGet_C","VecMatlabEngineGet_Default",VecMatlabEngineGet_Default);CHKERRQ(ierr);
#endif
  ierr = PetscObjectChangeTypeName((PetscObject)v,VECMPI);CHKERRQ(ierr);
  ierr = PetscPublishAll(v);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
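/*
   Example: a minimal sketch (not part of this file) of the two usual ways an
   application reaches VecCreate_MPI_Private(); error checking is elided.

     Vec x;
     VecCreate(PETSC_COMM_WORLD,&x);
     VecSetSizes(x,PETSC_DECIDE,100);
     VecSetType(x,VECMPI);                  invokes VecCreate_MPI() below

   or, equivalently, with the convenience routine

     VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE,100,&x);
*/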
/*MC
   VECMPI - VECMPI = "mpi" - The basic parallel vector

   Options Database Keys:
.  -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions()

   Level: beginner

.seealso: VecCreate(), VecSetType(), VecSetFromOptions(), VecCreateMPIWithArray(), VECMPI, VecType, VecCreateMPI()
M*/

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "VecCreate_MPI"
PetscErrorCode PETSCVEC_DLLEXPORT VecCreate_MPI(Vec vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "VecCreateMPIWithArray"
/*@C
   VecCreateMPIWithArray - Creates a parallel, array-style vector,
   where the user provides the array space to store the vector values.

   Collective on MPI_Comm

   Input Parameters:
+  comm  - the MPI communicator to use
.  n     - local vector length, cannot be PETSC_DECIDE
.  N     - global vector length (or PETSC_DECIDE to have calculated)
-  array - the user provided array to store the vector values

   Output Parameter:
.  vv - the vector

   Notes:
   Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
   same type as an existing vector.

   If the user-provided array is PETSC_NULL, then VecPlaceArray() can be used
   at a later stage to SET the array for storing the vector values.

   PETSc does NOT free the array when the vector is destroyed via VecDestroy().
   The user should not free the array until the vector is destroyed.

   Level: intermediate

   Concepts: vectors^creating with array

.seealso: VecCreateSeqWithArray(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(),
          VecCreateMPI(), VecCreateGhostWithArray(), VecPlaceArray()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecCreateMPIWithArray(MPI_Comm comm,PetscInt n,PetscInt N,const PetscScalar array[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (n == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_FALSE,0,array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
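/*
   Example: a minimal sketch (not part of this file) of VecCreateMPIWithArray();
   each process supplies its own local array, which PETSc never frees.
   Error checking is elided.

     PetscScalar array[5];
     Vec         x;
     VecCreateMPIWithArray(PETSC_COMM_WORLD,5,PETSC_DECIDE,array,&x);
     VecSet(x,1.0);                   writes into array[] on each process
     VecDestroy(x);                   array[] is NOT freed here
*/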
#undef __FUNCT__
#define __FUNCT__ "VecGhostStateSync_Private"
/*
  This is used in VecGhostGetLocalForm and VecGhostRestoreLocalForm to ensure
  that the state is updated if either vector has changed since the last time
  one of these functions was called.  It could apply to any PetscObject, but
  VecGhost is quite different from other objects in that two separate vectors
  look at the same memory.

  In principle, we could only propagate the state to the local vector on
  GetLocalForm and to the global vector on RestoreLocalForm, but this version is
  more conservative (i.e. robust against misuse) and simpler.

  Note that this function is correct and changes nothing if both arguments are the
  same, which is the case in serial.
*/
static PetscErrorCode VecGhostStateSync_Private(Vec g,Vec l)
{
  PetscErrorCode ierr;
  PetscInt       gstate,lstate;

  PetscFunctionBegin;
  ierr = PetscObjectStateQuery((PetscObject)g,&gstate);CHKERRQ(ierr);
  ierr = PetscObjectStateQuery((PetscObject)l,&lstate);CHKERRQ(ierr);
  ierr = PetscObjectSetState((PetscObject)g,PetscMax(gstate,lstate));CHKERRQ(ierr);
  ierr = PetscObjectSetState((PetscObject)l,PetscMax(gstate,lstate));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecGhostGetLocalForm"
/*@
    VecGhostGetLocalForm - Obtains the local ghosted representation of
    a parallel vector created with VecCreateGhost().

    Not Collective

    Input Parameter:
.   g - the global vector. The vector must have been obtained with either
        VecCreateGhost(), VecCreateGhostWithArray() or VecCreateSeq().

    Output Parameter:
.   l - the local (ghosted) representation

    Notes:
    This routine does not actually update the ghost values, but rather it
    returns a sequential vector that includes the locations for the ghost
    values and their current values. The returned vector and the original
    vector passed in share the same array that contains the actual vector data.

    One should call VecGhostRestoreLocalForm() or VecDestroy() once one is
    finished using the object.

    Level: advanced

    Concepts: vectors^ghost point access

.seealso: VecCreateGhost(), VecGhostRestoreLocalForm(), VecCreateGhostWithArray()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecGhostGetLocalForm(Vec g,Vec *l)
{
  PetscErrorCode ierr;
  PetscTruth     isseq,ismpi;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(g,VEC_CLASSID,1);
  PetscValidPointer(l,2);

  ierr = PetscTypeCompare((PetscObject)g,VECSEQ,&isseq);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)g,VECMPI,&ismpi);CHKERRQ(ierr);
  if (ismpi) {
    Vec_MPI *v = (Vec_MPI*)g->data;
    if (!v->localrep) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
    *l = v->localrep;
  } else if (isseq) {
    *l = g;
  } else {
    SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Vector type %s does not have local representation",((PetscObject)g)->type_name);
  }
  ierr = VecGhostStateSync_Private(g,*l);CHKERRQ(ierr);
  ierr = PetscObjectReference((PetscObject)*l);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
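/*
   Example: a minimal sketch (not part of this file) of the get/restore pairing
   for VecGhostGetLocalForm(); assumes x was created with VecCreateGhost() and
   its ghost values have been updated. Error checking is elided.

     Vec         xlocal;
     PetscScalar *a;
     PetscInt    i,nlocal;
     VecGhostGetLocalForm(x,&xlocal);
     VecGetLocalSize(xlocal,&nlocal);       owned entries first, then ghosts
     VecGetArray(xlocal,&a);
     for (i=0; i<nlocal; i++) { ... a[i] ... }
     VecRestoreArray(xlocal,&a);
     VecGhostRestoreLocalForm(x,&xlocal);
*/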
#undef __FUNCT__
#define __FUNCT__ "VecGhostRestoreLocalForm"
/*@
    VecGhostRestoreLocalForm - Restores the local ghosted representation of
    a parallel vector obtained with VecGhostGetLocalForm().

    Not Collective

    Input Parameters:
+   g - the global vector
-   l - the local (ghosted) representation

    Notes:
    This routine does not actually update the ghost values; it simply releases
    the local representation obtained with VecGhostGetLocalForm() after
    synchronizing the state of the two vectors.

    Level: advanced

.seealso: VecCreateGhost(), VecGhostGetLocalForm(), VecCreateGhostWithArray()
@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecGhostRestoreLocalForm(Vec g,Vec *l)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecGhostStateSync_Private(g,*l);CHKERRQ(ierr);
  ierr = PetscObjectDereference((PetscObject)*l);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecGhostUpdateBegin"
/*@
   VecGhostUpdateBegin - Begins the vector scatter to update the vector from
   local representation to global or global representation to local.

   Collective on Vec

   Input Parameters:
+  g           - the vector (obtained with VecCreateGhost() or VecDuplicate())
.  insertmode  - one of ADD_VALUES or INSERT_VALUES
-  scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE

   Notes:
   Use the following to update the ghost regions with correct values from the owning process
.vb
       VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
       VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
.ve

   Use the following to accumulate the ghost region values onto the owning processors
.vb
       VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
.ve

   To accumulate the ghost region values onto the owning processors and then update
   the ghost regions correctly, call the latter followed by the former, i.e.,
.vb
       VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
       VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
.ve

   Level: advanced

.seealso: VecCreateGhost(), VecGhostUpdateEnd(), VecGhostGetLocalForm(),
          VecGhostRestoreLocalForm(), VecCreateGhostWithArray()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecGhostUpdateBegin(Vec g,InsertMode insertmode,ScatterMode scattermode)
{
  Vec_MPI        *v;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(g,VEC_CLASSID,1);

  v = (Vec_MPI*)g->data;
  if (!v->localrep) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
  if (!v->localupdate) PetscFunctionReturn(0);

  if (scattermode == SCATTER_REVERSE) {
    ierr = VecScatterBegin(v->localupdate,v->localrep,g,insertmode,scattermode);CHKERRQ(ierr);
  } else {
    ierr = VecScatterBegin(v->localupdate,g,v->localrep,insertmode,scattermode);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
#undef __FUNCT__
#define __FUNCT__ "VecGhostUpdateEnd"
/*@
   VecGhostUpdateEnd - Ends the vector scatter to update the vector from
   local representation to global or global representation to local.

   Collective on Vec

   Input Parameters:
+  g           - the vector (obtained with VecCreateGhost() or VecDuplicate())
.  insertmode  - one of ADD_VALUES or INSERT_VALUES
-  scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE

   Notes:
   Use the following to update the ghost regions with correct values from the owning process
.vb
       VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
       VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
.ve

   Use the following to accumulate the ghost region values onto the owning processors
.vb
       VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
.ve

   To accumulate the ghost region values onto the owning processors and then update
   the ghost regions correctly, call the latter followed by the former, i.e.,
.vb
       VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
       VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
.ve

   Level: advanced

.seealso: VecCreateGhost(), VecGhostUpdateBegin(), VecGhostGetLocalForm(),
          VecGhostRestoreLocalForm(), VecCreateGhostWithArray()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecGhostUpdateEnd(Vec g,InsertMode insertmode,ScatterMode scattermode)
{
  Vec_MPI        *v;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(g,VEC_CLASSID,1);

  v = (Vec_MPI*)g->data;
  if (!v->localrep) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
  if (!v->localupdate) PetscFunctionReturn(0);

  if (scattermode == SCATTER_REVERSE) {
    ierr = VecScatterEnd(v->localupdate,v->localrep,g,insertmode,scattermode);CHKERRQ(ierr);
  } else {
    ierr = VecScatterEnd(v->localupdate,g,v->localrep,insertmode,scattermode);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
#undef __FUNCT__
#define __FUNCT__ "VecCreateGhostWithArray"
/*@C
   VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space.

   Collective on MPI_Comm

   Input Parameters:
+  comm   - the MPI communicator to use
.  n      - local vector length
.  N      - global vector length (or PETSC_DECIDE to have calculated if n is given)
.  nghost - number of local ghost points
.  ghosts - global indices of ghost points (or PETSC_NULL if not needed)
-  array  - the space to store the vector values (as long as n + nghost)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   Level: advanced

   Concepts: vectors^creating with array
   Concepts: vectors^ghosted

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostWithArray(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode         ierr;
  Vec_MPI                *w;
  PetscScalar            *larray;
  IS                     from,to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart,i,*indices;

  PetscFunctionBegin;
  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost,array);CHKERRQ(ierr);
  w    = (Vec_MPI*)(*vv)->data;
  /* Create local representation */
  ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localrep);CHKERRQ(ierr);
  ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  ierr = ISCreateGeneral(comm,nghost,ghosts,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localupdate);CHKERRQ(ierr);
  ierr = ISDestroy(to);CHKERRQ(ierr);
  ierr = ISDestroy(from);CHKERRQ(ierr);

  /* set local to global mapping for ghosted vector */
  ierr = PetscMalloc((n+nghost)*sizeof(PetscInt),&indices);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(*vv,&rstart,PETSC_NULL);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    indices[i] = rstart + i;
  }
  for (i=0; i<nghost; i++) {
    indices[n+i] = ghosts[i];
  }
  ierr = ISLocalToGlobalMappingCreate(comm,n+nghost,indices,&ltog);CHKERRQ(ierr);
  ierr = PetscFree(indices);CHKERRQ(ierr);
  ierr = VecSetLocalToGlobalMapping(*vv,ltog);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(ltog);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
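/*
   Example: a minimal sketch (not part of this file) of VecCreateGhostWithArray()
   with a caller-allocated buffer; nloc and the ghost indices are illustrative,
   and the buffer must hold at least nloc+nghost entries. Error checking is elided.

     PetscInt    nloc = 4,nghost = 2,ghosts[2];
     PetscScalar *space;
     Vec         x;
     ...                                      fill ghosts[] with global indices owned by other processes
     PetscMalloc((nloc+nghost)*sizeof(PetscScalar),&space);
     VecCreateGhostWithArray(PETSC_COMM_WORLD,nloc,PETSC_DECIDE,nghost,ghosts,space,&x);
     ...
     VecDestroy(x);
     PetscFree(space);                        the caller, not PETSc, frees the space
*/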
#undef __FUNCT__
#define __FUNCT__ "VecCreateGhost"
/*@
   VecCreateGhost - Creates a parallel vector with ghost padding on each processor.

   Collective on MPI_Comm

   Input Parameters:
+  comm   - the MPI communicator to use
.  n      - local vector length
.  N      - global vector length (or PETSC_DECIDE to have calculated if n is given)
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhost(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreateGhostWithArray(comm,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecDuplicate_MPI"
PetscErrorCode VecDuplicate_MPI(Vec win,Vec *v)
{
  PetscErrorCode ierr;
  Vec_MPI        *vw,*w = (Vec_MPI*)win->data;
  PetscScalar    *array;

  PetscFunctionBegin;
  ierr = VecCreate(((PetscObject)win)->comm,v);CHKERRQ(ierr);

  /* use the map that already exists in win */
  ierr = PetscLayoutDestroy((*v)->map);CHKERRQ(ierr);
  (*v)->map = win->map;
  win->map->refcnt++;

  ierr = VecCreate_MPI_Private(*v,PETSC_TRUE,w->nghost,0);CHKERRQ(ierr);
  vw   = (Vec_MPI*)(*v)->data;
  ierr = PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps));CHKERRQ(ierr);

  /* save local representation of the parallel vector (and scatter) if it exists */
  if (w->localrep) {
    ierr = VecGetArray(*v,&array);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,win->map->n+w->nghost,array,&vw->localrep);CHKERRQ(ierr);
    ierr = PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps));CHKERRQ(ierr);
    ierr = VecRestoreArray(*v,&array);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(*v,vw->localrep);CHKERRQ(ierr);
    vw->localupdate = w->localupdate;
    if (vw->localupdate) {
      ierr = PetscObjectReference((PetscObject)vw->localupdate);CHKERRQ(ierr);
    }
  }

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash   = win->stash.donotstash;
  (*v)->stash.ignorenegidx = win->stash.ignorenegidx;

  ierr = PetscOListDuplicate(((PetscObject)win)->olist,&((PetscObject)(*v))->olist);CHKERRQ(ierr);
  ierr = PetscFListDuplicate(((PetscObject)win)->qlist,&((PetscObject)(*v))->qlist);CHKERRQ(ierr);
  if (win->mapping) {
    ierr = PetscObjectReference((PetscObject)win->mapping);CHKERRQ(ierr);
    (*v)->mapping = win->mapping;
  }
  if (win->bmapping) {
    ierr = PetscObjectReference((PetscObject)win->bmapping);CHKERRQ(ierr);
    (*v)->bmapping = win->bmapping;
  }
  (*v)->map->bs   = win->map->bs;
  (*v)->bstash.bs = win->bstash.bs;
  PetscFunctionReturn(0);
}
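/*
   Example: a minimal sketch (not part of this file) of the typical ghosted-vector
   cycle built from VecCreateGhost() and the update routines above; the ghost
   indices are illustrative and error checking is elided.

     PetscInt nloc = 4,ghosts[2];
     Vec      x;
     ...                                      fill ghosts[] with global indices owned by other processes
     VecCreateGhost(PETSC_COMM_WORLD,nloc,PETSC_DECIDE,2,ghosts,&x);
     ...                                      set the owned entries of x
     VecGhostUpdateBegin(x,INSERT_VALUES,SCATTER_FORWARD);
     VecGhostUpdateEnd(x,INSERT_VALUES,SCATTER_FORWARD);
     ...                                      ghost entries are now current in the local form
     VecDestroy(x);
*/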
/* ------------------------------------------------------------------------------------------*/
#undef __FUNCT__
#define __FUNCT__ "VecCreateGhostBlockWithArray"
/*@C
   VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space. Indices in the ghost region are based on blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm   - the MPI communicator to use
.  bs     - block size
.  n      - local vector length
.  N      - global vector length (or PETSC_DECIDE to have calculated if n is given)
.  nghost - number of local ghost blocks
.  ghosts - global indices of ghost blocks (or PETSC_NULL if not needed)
-  array  - the space to store the vector values (as long as n + nghost*bs)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (the total local size, not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

   Level: advanced

   Concepts: vectors^creating ghosted
   Concepts: vectors^creating with array

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostWithArray(), VecCreateGhostBlock()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostBlockWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode         ierr;
  Vec_MPI                *w;
  PetscScalar            *larray;
  IS                     from,to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart,i,nb,*indices;

  PetscFunctionBegin;
  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  if (n % bs)                 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local size must be a multiple of block size");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost*bs,array);CHKERRQ(ierr);
  ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
  w    = (Vec_MPI*)(*vv)->data;
  /* Create local representation */
  ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,n+bs*nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = VecSetBlockSize(w->localrep,bs);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localrep);CHKERRQ(ierr);
  ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  ierr = ISCreateBlock(comm,bs,nghost,ghosts,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localupdate);CHKERRQ(ierr);
  ierr = ISDestroy(to);CHKERRQ(ierr);
  ierr = ISDestroy(from);CHKERRQ(ierr);

  /* set local to global mapping for ghosted vector */
  nb   = n/bs;
  ierr = PetscMalloc((nb+nghost)*sizeof(PetscInt),&indices);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(*vv,&rstart,PETSC_NULL);CHKERRQ(ierr);
  for (i=0; i<nb; i++) {
    indices[i] = rstart + i*bs;
  }
  for (i=0; i<nghost; i++) {
    indices[nb+i] = ghosts[i];
  }
  ierr = ISLocalToGlobalMappingCreate(comm,nb+nghost,indices,&ltog);CHKERRQ(ierr);
  ierr = PetscFree(indices);CHKERRQ(ierr);
  ierr = VecSetLocalToGlobalMappingBlock(*vv,ltog);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(ltog);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#undef __FUNCT__
#define __FUNCT__ "VecCreateGhostBlock"
/*@
   VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
   The indexing of the ghost points is done by blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm   - the MPI communicator to use
.  bs     - the block size
.  n      - local vector length
.  N      - global vector length (or PETSC_DECIDE to have calculated if n is given)
.  nghost - number of local ghost blocks
-  ghosts - global indices of ghost blocks

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (the total local size, not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostBlock(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
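/*
   Example: a minimal sketch (not part of this file) of VecCreateGhostBlock();
   note that n counts scalar entries while nghost and ghosts[] are given in
   blocks, so the local form holds n + bs*nghost entries. The indices are
   illustrative and error checking is elided.

     PetscInt bs = 2,nloc = 6,ghosts[2];
     Vec      x;
     ...                                      fill ghosts[] with global block indices owned elsewhere
     VecCreateGhostBlock(PETSC_COMM_WORLD,bs,nloc,PETSC_DECIDE,2,ghosts,&x);
     ...
     VecDestroy(x);
*/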
/*
  These introduce a ghosted vector where the ghosting is determined by the call to
  VecSetLocalToGlobalMapping()
*/

#undef __FUNCT__
#define __FUNCT__ "VecSetLocalToGlobalMapping_FETI"
PetscErrorCode VecSetLocalToGlobalMapping_FETI(Vec vv,ISLocalToGlobalMapping map)
{
  PetscErrorCode ierr;
  Vec_MPI        *v = (Vec_MPI*)vv->data;

  PetscFunctionBegin;
  v->nghost = map->n - vv->map->n;

  /* we need to enlarge the array space that was allocated when the vector was created */
  ierr = PetscFree(v->array_allocated);CHKERRQ(ierr);
  ierr = PetscMalloc(map->n*sizeof(PetscScalar),&v->array_allocated);CHKERRQ(ierr);
  v->array = v->array_allocated;

  /* Create local representation */
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,map->n,v->array,&v->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(vv,v->localrep);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecSetValuesLocal_FETI"
PetscErrorCode VecSetValuesLocal_FETI(Vec vv,PetscInt n,const PetscInt *ix,const PetscScalar *values,InsertMode mode)
{
  PetscErrorCode ierr;
  Vec_MPI        *v = (Vec_MPI*)vv->data;

  PetscFunctionBegin;
  ierr = VecSetValues(v->localrep,n,ix,values,mode);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "VecCreate_FETI"
PetscErrorCode PETSCVEC_DLLEXPORT VecCreate_FETI(Vec vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecSetType(vv,VECMPI);CHKERRQ(ierr);

  /* overwrite the functions to handle setting values locally */
  vv->ops->setlocaltoglobalmapping = VecSetLocalToGlobalMapping_FETI;
  vv->ops->setvalueslocal          = VecSetValuesLocal_FETI;
  vv->ops->assemblybegin           = 0;
  vv->ops->assemblyend             = 0;
  vv->ops->setvaluesblocked        = 0;
  PetscFunctionReturn(0);
}
EXTERN_C_END