#define PETSCVEC_DLL
/*
   This file contains routines for parallel vector operations.
 */
#include "../src/vec/vec/impls/mpi/pvecimpl.h"   /*I  "petscvec.h"   I*/

#if 0
#undef __FUNCT__
#define __FUNCT__ "VecPublish_MPI"
static PetscErrorCode VecPublish_MPI(PetscObject obj)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
#endif

#undef __FUNCT__
#define __FUNCT__ "VecDot_MPI"
PetscErrorCode VecDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecDot_Seq(xin,yin,&work);CHKERRQ(ierr);
  ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,((PetscObject)xin)->comm);CHKERRQ(ierr);
  *z   = sum;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecTDot_MPI"
PetscErrorCode VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecTDot_Seq(xin,yin,&work);CHKERRQ(ierr);
  ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,((PetscObject)xin)->comm);CHKERRQ(ierr);
  *z   = sum;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecSetOption_MPI"
PetscErrorCode VecSetOption_MPI(Vec v,VecOption op,PetscTruth flag)
{
  PetscFunctionBegin;
  if (op == VEC_IGNORE_OFF_PROC_ENTRIES) {
    v->stash.donotstash = flag;
  } else if (op == VEC_IGNORE_NEGATIVE_INDICES) {
    v->stash.ignorenegidx = flag;
  }
  PetscFunctionReturn(0);
}

EXTERN PetscErrorCode VecDuplicate_MPI(Vec,Vec *);
EXTERN_C_BEGIN
EXTERN PetscErrorCode VecView_MPI_Draw(Vec,PetscViewer);
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "VecPlaceArray_MPI"
PetscErrorCode VecPlaceArray_MPI(Vec vin,const PetscScalar *a)
{
  PetscErrorCode ierr;
  Vec_MPI        *v = (Vec_MPI*)vin->data;

  PetscFunctionBegin;
  if (v->unplacedarray) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
  v->unplacedarray = v->array;  /* save previous array so reset can bring it back */
  v->array         = (PetscScalar*)a;
  if (v->localrep) {
    ierr = VecPlaceArray(v->localrep,a);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecResetArray_MPI"
PetscErrorCode VecResetArray_MPI(Vec vin)
{
  Vec_MPI        *v = (Vec_MPI*)vin->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  v->array         = v->unplacedarray;
  v->unplacedarray = 0;
  if (v->localrep) {
    ierr = VecResetArray(v->localrep);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
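/*
   Illustrative sketch (not part of the library): how a caller pairs VecPlaceArray()
   with VecResetArray() on an MPI vector. The guard in VecPlaceArray_MPI() above
   rejects a second placement before a reset. The function name, buffer name, and
   size are assumptions for the example.
*/
#if 0
static PetscErrorCode ExamplePlaceResetUsage(Vec x,PetscInt nlocal)
{
  PetscErrorCode ierr;
  PetscScalar    *mybuf;

  PetscFunctionBegin;
  ierr = PetscMalloc(nlocal*sizeof(PetscScalar),&mybuf);CHKERRQ(ierr);
  ierr = VecPlaceArray(x,mybuf);CHKERRQ(ierr);   /* x now reads/writes mybuf */
  /* ... operate on x ... */
  ierr = VecResetArray(x);CHKERRQ(ierr);         /* restore the original array */
  ierr = PetscFree(mybuf);CHKERRQ(ierr);         /* the caller owns the placed array */
  PetscFunctionReturn(0);
}
#endif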
EXTERN PetscErrorCode VecLoad_Binary(PetscViewer, const VecType, Vec*);
EXTERN PetscErrorCode VecLoadnew_Binary(PetscViewer,Vec);
EXTERN PetscErrorCode VecGetValues_MPI(Vec,PetscInt,const PetscInt [],PetscScalar []);

static struct _VecOps DvOps = { VecDuplicate_MPI, /* 1 */
            VecDuplicateVecs_Default,
            VecDestroyVecs_Default,
            VecDot_MPI,
            VecMDot_MPI,
            VecNorm_MPI,
            VecTDot_MPI,
            VecMTDot_MPI,
            VecScale_Seq,
            VecCopy_Seq, /* 10 */
            VecSet_Seq,
            VecSwap_Seq,
            VecAXPY_Seq,
            VecAXPBY_Seq,
            VecMAXPY_Seq,
            VecAYPX_Seq,
            VecWAXPY_Seq,
            VecAXPBYPCZ_Seq,
            VecPointwiseMult_Seq,
            VecPointwiseDivide_Seq,
            VecSetValues_MPI, /* 20 */
            VecAssemblyBegin_MPI,
            VecAssemblyEnd_MPI,
            VecGetArray_Seq,
            VecGetSize_MPI,
            VecGetSize_Seq,
            VecRestoreArray_Seq,
            VecMax_MPI,
            VecMin_MPI,
            VecSetRandom_Seq,
            VecSetOption_MPI,
            VecSetValuesBlocked_MPI,
            VecDestroy_MPI,
            VecView_MPI,
            VecPlaceArray_MPI,
            VecReplaceArray_Seq,
            VecDot_Seq,
            VecTDot_Seq,
            VecNorm_Seq,
            VecMDot_Seq,
            VecMTDot_Seq,
            VecLoadIntoVector_Default,
            0, /* VecLoadIntoVectorNative */
            VecReciprocal_Default,
            0, /* VecViewNative... */
            VecConjugate_Seq,
            0,
            0,
            VecResetArray_MPI,
            0,
            VecMaxPointwiseDivide_Seq,
            VecLoad_Binary,
            VecLoadnew_Binary,
            VecPointwiseMax_Seq,
            VecPointwiseMaxAbs_Seq,
            VecPointwiseMin_Seq,
            VecGetValues_MPI,
            0,
            0,
            0,
            0,
            0,
            0,
            VecStrideGather_Default,
            VecStrideScatter_Default
};

#undef __FUNCT__
#define __FUNCT__ "VecCreate_MPI_Private"
/*
    VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
    VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
    VecDuplicate_MPI(), VecCreateGhostWithArray(), and VecDuplicate_Shared()

    If alloc is true and array is PETSC_NULL then this routine allocates the space;
    otherwise no space is allocated.
*/
PetscErrorCode VecCreate_MPI_Private(Vec v,PetscTruth alloc,PetscInt nghost,const PetscScalar array[])
{
  Vec_MPI        *s;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr           = PetscNewLog(v,Vec_MPI,&s);CHKERRQ(ierr);
  v->data        = (void*)s;
  ierr           = PetscMemcpy(v->ops,&DvOps,sizeof(DvOps));CHKERRQ(ierr);
  s->nghost      = nghost;
  v->mapping     = 0;
  v->bmapping    = 0;
  v->petscnative = PETSC_TRUE;

  if (v->map->bs == -1) v->map->bs = 1;
  ierr = PetscLayoutSetUp(v->map);CHKERRQ(ierr);
  s->array           = (PetscScalar*)array;
  s->array_allocated = 0;
  if (alloc && !array) {
    PetscInt n = v->map->n+nghost;
    ierr = PetscMalloc(n*sizeof(PetscScalar),&s->array);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory(v,n*sizeof(PetscScalar));CHKERRQ(ierr);
    ierr = PetscMemzero(s->array,v->map->n*sizeof(PetscScalar));CHKERRQ(ierr);
    s->array_allocated = s->array;
  }

  /* By default parallel vectors do not have a local representation */
  s->localrep    = 0;
  s->localupdate = 0;

  v->stash.insertmode = NOT_SET_VALUES;
  /* create the stashes. The block-size for the bstash is set later when
     VecSetValuesBlocked is called.
  */
  ierr = VecStashCreate_Private(((PetscObject)v)->comm,1,&v->stash);CHKERRQ(ierr);
  ierr = VecStashCreate_Private(((PetscObject)v)->comm,v->map->bs,&v->bstash);CHKERRQ(ierr);

#if defined(PETSC_HAVE_MATLAB_ENGINE)
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEnginePut_C","VecMatlabEnginePut_Default",VecMatlabEnginePut_Default);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEngineGet_C","VecMatlabEngineGet_Default",VecMatlabEngineGet_Default);CHKERRQ(ierr);
#endif
  ierr = PetscObjectChangeTypeName((PetscObject)v,VECMPI);CHKERRQ(ierr);
  ierr = PetscPublishAll(v);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
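/*
   Illustrative sketch (not part of the library): the public creation path that ends in
   VecCreate_MPI_Private() above. VecSetType(v,VECMPI) -- or -vec_type mpi together with
   VecSetFromOptions() -- dispatches to VecCreate_MPI() below. The sizes are arbitrary.
*/
#if 0
static PetscErrorCode ExampleCreateMPIVec(MPI_Comm comm,Vec *x)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreate(comm,x);CHKERRQ(ierr);
  ierr = VecSetSizes(*x,100,PETSC_DECIDE);CHKERRQ(ierr); /* 100 local entries per process */
  ierr = VecSetType(*x,VECMPI);CHKERRQ(ierr);            /* calls VecCreate_MPI() */
  PetscFunctionReturn(0);
}
#endif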
/*MC
   VECMPI - VECMPI = "mpi" - The basic parallel vector

   Options Database Keys:
. -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions()

  Level: beginner

.seealso: VecCreate(), VecSetType(), VecSetFromOptions(), VecCreateMPIWithArray(), VECMPI, VecType, VecCreateMPI()
M*/

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "VecCreate_MPI"
PetscErrorCode PETSCVEC_DLLEXPORT VecCreate_MPI(Vec vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "VecCreateMPIWithArray"
/*@C
   VecCreateMPIWithArray - Creates a parallel, array-style vector,
   where the user provides the array space to store the vector values.

   Collective on MPI_Comm

   Input Parameters:
+  comm  - the MPI communicator to use
.  n     - local vector length, cannot be PETSC_DECIDE
.  N     - global vector length (or PETSC_DECIDE to have it calculated)
-  array - the user provided array to store the vector values

   Output Parameter:
.  vv - the vector

   Notes:
   Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
   same type as an existing vector.

   If the user-provided array is PETSC_NULL, then VecPlaceArray() can be used
   at a later stage to SET the array for storing the vector values.

   PETSc does NOT free the array when the vector is destroyed via VecDestroy().
   The user should not free the array until the vector is destroyed.

   Level: intermediate

   Concepts: vectors^creating with array

.seealso: VecCreateSeqWithArray(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(),
          VecCreateMPI(), VecCreateGhostWithArray(), VecPlaceArray()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecCreateMPIWithArray(MPI_Comm comm,PetscInt n,PetscInt N,const PetscScalar array[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (n == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_FALSE,0,array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
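/*
   Illustrative sketch (not part of the library): creating a vector over a caller-owned
   buffer with VecCreateMPIWithArray(). The buffer must outlive the vector. The function
   name and the per-process length nlocal are assumptions for the example.
*/
#if 0
static PetscErrorCode ExampleCreateWithArray(MPI_Comm comm,PetscInt nlocal)
{
  PetscErrorCode ierr;
  PetscScalar    *buf;
  Vec            x;

  PetscFunctionBegin;
  ierr = PetscMalloc(nlocal*sizeof(PetscScalar),&buf);CHKERRQ(ierr);
  ierr = VecCreateMPIWithArray(comm,nlocal,PETSC_DECIDE,buf,&x);CHKERRQ(ierr);
  /* ... use x; its values live in buf ... */
  ierr = VecDestroy(x);CHKERRQ(ierr);   /* does NOT free buf */
  ierr = PetscFree(buf);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#endif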
#undef __FUNCT__
#define __FUNCT__ "VecGhostStateSync_Private"
/*
    This is used in VecGhostGetLocalForm() and VecGhostRestoreLocalForm() to ensure
    that the state is updated if either vector has changed since the last time
    one of these functions was called. It could apply to any PetscObject, but
    VecGhost is quite different from other objects in that two separate vectors
    look at the same memory.

    In principle, we could only propagate state to the local vector on
    GetLocalForm and to the global vector on RestoreLocalForm, but this version is
    more conservative (i.e. robust against misuse) and simpler.

    Note that this function is correct and changes nothing if both arguments are the
    same, which is the case in serial.
*/
static PetscErrorCode VecGhostStateSync_Private(Vec g,Vec l)
{
  PetscErrorCode ierr;
  PetscInt       gstate,lstate;

  PetscFunctionBegin;
  ierr = PetscObjectStateQuery((PetscObject)g,&gstate);CHKERRQ(ierr);
  ierr = PetscObjectStateQuery((PetscObject)l,&lstate);CHKERRQ(ierr);
  ierr = PetscObjectSetState((PetscObject)g,PetscMax(gstate,lstate));CHKERRQ(ierr);
  ierr = PetscObjectSetState((PetscObject)l,PetscMax(gstate,lstate));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecGhostGetLocalForm"
/*@
    VecGhostGetLocalForm - Obtains the local ghosted representation of
    a parallel vector created with VecCreateGhost().

    Not Collective

    Input Parameter:
.   g - the global vector. The vector must have been obtained with either
        VecCreateGhost(), VecCreateGhostWithArray() or VecCreateSeq().

    Output Parameter:
.   l - the local (ghosted) representation

    Notes:
    This routine does not actually update the ghost values, but rather it
    returns a sequential vector that includes the locations for the ghost
    values and their current values. The returned vector and the original
    vector passed in share the same array that contains the actual vector data.

    One should call VecGhostRestoreLocalForm() or VecDestroy() once one is
    finished using the object.

    Level: advanced

   Concepts: vectors^ghost point access

.seealso: VecCreateGhost(), VecGhostRestoreLocalForm(), VecCreateGhostWithArray()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecGhostGetLocalForm(Vec g,Vec *l)
{
  PetscErrorCode ierr;
  PetscTruth     isseq,ismpi;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(g,VEC_CLASSID,1);
  PetscValidPointer(l,2);

  ierr = PetscTypeCompare((PetscObject)g,VECSEQ,&isseq);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)g,VECMPI,&ismpi);CHKERRQ(ierr);
  if (ismpi) {
    Vec_MPI *v = (Vec_MPI*)g->data;
    if (!v->localrep) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
    *l = v->localrep;
  } else if (isseq) {
    *l = g;
  } else {
    SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Vector type %s does not have local representation",((PetscObject)g)->type_name);
  }
  ierr = VecGhostStateSync_Private(g,*l);CHKERRQ(ierr);
  ierr = PetscObjectReference((PetscObject)*l);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
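/*
   Illustrative sketch (not part of the library): accessing ghost entries through the
   local form. For a vector with n owned entries and nghost ghosts, indices n..n+nghost-1
   of the local form hold the ghost values. The function name is an assumption.
*/
#if 0
static PetscErrorCode ExampleReadGhosts(Vec g)
{
  PetscErrorCode ierr;
  Vec            l;
  PetscScalar    *a;
  PetscInt       nloc;

  PetscFunctionBegin;
  ierr = VecGhostGetLocalForm(g,&l);CHKERRQ(ierr);
  ierr = VecGetLocalSize(l,&nloc);CHKERRQ(ierr);   /* owned + ghost length */
  ierr = VecGetArray(l,&a);CHKERRQ(ierr);
  /* ... a[0..nloc-1]: owned entries first, then the ghosts ... */
  ierr = VecRestoreArray(l,&a);CHKERRQ(ierr);
  ierr = VecGhostRestoreLocalForm(g,&l);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#endif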
#undef __FUNCT__
#define __FUNCT__ "VecGhostRestoreLocalForm"
/*@
    VecGhostRestoreLocalForm - Restores the local ghosted representation of
    a parallel vector obtained with VecGhostGetLocalForm().

    Not Collective

    Input Parameters:
+   g - the global vector
-   l - the local (ghosted) representation

    Notes:
    This routine does not actually update the ghost values, but rather it
    returns a sequential vector that includes the locations for the ghost values
    and their current values.

    Level: advanced

.seealso: VecCreateGhost(), VecGhostGetLocalForm(), VecCreateGhostWithArray()
@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecGhostRestoreLocalForm(Vec g,Vec *l)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecGhostStateSync_Private(g,*l);CHKERRQ(ierr);
  ierr = PetscObjectDereference((PetscObject)*l);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecGhostUpdateBegin"
/*@
   VecGhostUpdateBegin - Begins the vector scatter to update the vector from
   local representation to global or global representation to local.

   Collective on Vec

   Input Parameters:
+  g - the vector (obtained with VecCreateGhost() or VecDuplicate())
.  insertmode - one of ADD_VALUES or INSERT_VALUES
-  scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE

   Notes:
   Use the following to update the ghost regions with correct values from the owning process
.vb
       VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
       VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
.ve

   Use the following to accumulate the ghost region values onto the owning processors
.vb
       VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
.ve

   To accumulate the ghost region values onto the owning processors and then update
   the ghost regions correctly, call the latter followed by the former, i.e.,
.vb
       VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
       VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
.ve

   Level: advanced

.seealso: VecCreateGhost(), VecGhostUpdateEnd(), VecGhostGetLocalForm(),
          VecGhostRestoreLocalForm(),VecCreateGhostWithArray()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecGhostUpdateBegin(Vec g,InsertMode insertmode,ScatterMode scattermode)
{
  Vec_MPI        *v;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(g,VEC_CLASSID,1);

  v = (Vec_MPI*)g->data;
  if (!v->localrep) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
  if (!v->localupdate) PetscFunctionReturn(0);

  if (scattermode == SCATTER_REVERSE) {
    ierr = VecScatterBegin(v->localupdate,v->localrep,g,insertmode,scattermode);CHKERRQ(ierr);
  } else {
    ierr = VecScatterBegin(v->localupdate,g,v->localrep,insertmode,scattermode);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
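/*
   Illustrative sketch (not part of the library): a typical compute cycle with a ghosted
   vector -- refresh the ghost entries, then work on the local form. ComputeOnLocalForm
   is an assumed user routine, shown commented out.
*/
#if 0
static PetscErrorCode ExampleGhostCycle(Vec g)
{
  PetscErrorCode ierr;
  Vec            l;

  PetscFunctionBegin;
  ierr = VecGhostUpdateBegin(g,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecGhostUpdateEnd(g,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);  /* ghosts now current */
  ierr = VecGhostGetLocalForm(g,&l);CHKERRQ(ierr);
  /* ierr = ComputeOnLocalForm(l);CHKERRQ(ierr); */
  ierr = VecGhostRestoreLocalForm(g,&l);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#endif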
#undef __FUNCT__
#define __FUNCT__ "VecGhostUpdateEnd"
/*@
   VecGhostUpdateEnd - Ends the vector scatter to update the vector from
   local representation to global or global representation to local.

   Collective on Vec

   Input Parameters:
+  g - the vector (obtained with VecCreateGhost() or VecDuplicate())
.  insertmode - one of ADD_VALUES or INSERT_VALUES
-  scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE

   Notes:
   Use the following to update the ghost regions with correct values from the owning process
.vb
       VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
       VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
.ve

   Use the following to accumulate the ghost region values onto the owning processors
.vb
       VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
.ve

   To accumulate the ghost region values onto the owning processors and then update
   the ghost regions correctly, call the latter followed by the former, i.e.,
.vb
       VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
       VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
.ve

   Level: advanced

.seealso: VecCreateGhost(), VecGhostUpdateBegin(), VecGhostGetLocalForm(),
          VecGhostRestoreLocalForm(),VecCreateGhostWithArray()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecGhostUpdateEnd(Vec g,InsertMode insertmode,ScatterMode scattermode)
{
  Vec_MPI        *v;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(g,VEC_CLASSID,1);

  v = (Vec_MPI*)g->data;
  if (!v->localrep) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
  if (!v->localupdate) PetscFunctionReturn(0);

  if (scattermode == SCATTER_REVERSE) {
    ierr = VecScatterEnd(v->localupdate,v->localrep,g,insertmode,scattermode);CHKERRQ(ierr);
  } else {
    ierr = VecScatterEnd(v->localupdate,g,v->localrep,insertmode,scattermode);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecCreateGhostWithArray"
/*@C
   VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost points
.  ghosts - global indices of ghost points (or PETSC_NULL if not needed)
-  array - the space to store the vector values (of length n + nghost)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   Level: advanced

   Concepts: vectors^creating with array
   Concepts: vectors^ghosted

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostWithArray(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode         ierr;
  Vec_MPI                *w;
  PetscScalar            *larray;
  IS                     from,to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart,i,*indices;

  PetscFunctionBegin;
  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost,array);CHKERRQ(ierr);
  w    = (Vec_MPI*)(*vv)->data;
  /* Create local representation */
  ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localrep);CHKERRQ(ierr);
  ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  ierr = ISCreateGeneral(comm,nghost,ghosts,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localupdate);CHKERRQ(ierr);
  ierr = ISDestroy(to);CHKERRQ(ierr);
  ierr = ISDestroy(from);CHKERRQ(ierr);

  /* set local to global mapping for ghosted vector */
  ierr = PetscMalloc((n+nghost)*sizeof(PetscInt),&indices);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(*vv,&rstart,PETSC_NULL);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    indices[i] = rstart + i;
  }
  for (i=0; i<nghost; i++) {
    indices[n+i] = ghosts[i];
  }
  ierr = ISLocalToGlobalMappingCreate(comm,n+nghost,indices,&ltog);CHKERRQ(ierr);
  ierr = PetscFree(indices);CHKERRQ(ierr);  /* the mapping copied indices; free them only once */
  ierr = VecSetLocalToGlobalMapping(*vv,ltog);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(ltog);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
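/*
   Illustrative sketch (not part of the library): a ghosted vector on an assumed
   2-process communicator, where each process ghosts the first entry owned by the
   other. The sizes and index choices are assumptions for the example.
*/
#if 0
static PetscErrorCode ExampleCreateGhosted(MPI_Comm comm,Vec *g)
{
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  PetscInt       nlocal = 4,nghost = 1,ghosts[1];

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ghosts[0] = rank ? 0 : nlocal;   /* a global index owned by the other process */
  ierr = VecCreateGhost(comm,nlocal,PETSC_DECIDE,nghost,ghosts,g);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#endif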
#undef __FUNCT__
#define __FUNCT__ "VecCreateGhost"
/*@
   VecCreateGhost - Creates a parallel vector with ghost padding on each processor.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhost(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreateGhostWithArray(comm,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecDuplicate_MPI"
PetscErrorCode VecDuplicate_MPI(Vec win,Vec *v)
{
  PetscErrorCode ierr;
  Vec_MPI        *vw,*w = (Vec_MPI*)win->data;
  PetscScalar    *array;

  PetscFunctionBegin;
  ierr = VecCreate(((PetscObject)win)->comm,v);CHKERRQ(ierr);

  /* use the map that already exists in win */
  ierr = PetscLayoutDestroy((*v)->map);CHKERRQ(ierr);
  (*v)->map = win->map;
  win->map->refcnt++;

  ierr = VecCreate_MPI_Private(*v,PETSC_TRUE,w->nghost,0);CHKERRQ(ierr);
  vw   = (Vec_MPI*)(*v)->data;
  ierr = PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps));CHKERRQ(ierr);

  /* save local representation of the parallel vector (and scatter) if it exists */
  if (w->localrep) {
    ierr = VecGetArray(*v,&array);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,win->map->n+w->nghost,array,&vw->localrep);CHKERRQ(ierr);
    ierr = PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps));CHKERRQ(ierr);
    ierr = VecRestoreArray(*v,&array);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(*v,vw->localrep);CHKERRQ(ierr);
    vw->localupdate = w->localupdate;
    if (vw->localupdate) {
      ierr = PetscObjectReference((PetscObject)vw->localupdate);CHKERRQ(ierr);
    }
  }

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash   = win->stash.donotstash;
  (*v)->stash.ignorenegidx = win->stash.ignorenegidx;

  ierr = PetscOListDuplicate(((PetscObject)win)->olist,&((PetscObject)(*v))->olist);CHKERRQ(ierr);
  ierr = PetscFListDuplicate(((PetscObject)win)->qlist,&((PetscObject)(*v))->qlist);CHKERRQ(ierr);
  if (win->mapping) {
    ierr = PetscObjectReference((PetscObject)win->mapping);CHKERRQ(ierr);
    (*v)->mapping = win->mapping;
  }
  if (win->bmapping) {
    ierr = PetscObjectReference((PetscObject)win->bmapping);CHKERRQ(ierr);
    (*v)->bmapping = win->bmapping;
  }
  (*v)->map->bs   = win->map->bs;
  (*v)->bstash.bs = win->bstash.bs;

  PetscFunctionReturn(0);
}
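/*
   Illustrative sketch (not part of the library): VecDuplicate() on a ghosted vector
   yields another ghosted vector; as VecDuplicate_MPI() above shows, the duplicate
   shares the parent's layout and references the same update scatter rather than
   rebuilding it. The function name is an assumption.
*/
#if 0
static PetscErrorCode ExampleDuplicateGhosted(Vec g)
{
  PetscErrorCode ierr;
  Vec            g2,l2;

  PetscFunctionBegin;
  ierr = VecDuplicate(g,&g2);CHKERRQ(ierr);             /* dispatches to VecDuplicate_MPI() */
  ierr = VecGhostGetLocalForm(g2,&l2);CHKERRQ(ierr);    /* the duplicate is ghosted too */
  ierr = VecGhostRestoreLocalForm(g2,&l2);CHKERRQ(ierr);
  ierr = VecDestroy(g2);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#endif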
/* ------------------------------------------------------------------------------------------*/
#undef __FUNCT__
#define __FUNCT__ "VecCreateGhostBlockWithArray"
/*@C
   VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space. Indices in the ghost region are based on blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - block size
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost blocks
.  ghosts - global indices of ghost blocks (or PETSC_NULL if not needed)
-  array - the space to store the vector values (of length n + nghost*bs)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (the total local size, not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

   Level: advanced

   Concepts: vectors^creating ghosted
   Concepts: vectors^creating with array

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostWithArray(), VecCreateGhostBlock()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostBlockWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode         ierr;
  Vec_MPI                *w;
  PetscScalar            *larray;
  IS                     from,to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart,i,nb,*indices;

  PetscFunctionBegin;
  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  if (n % bs)                 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local size must be a multiple of block size");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost*bs,array);CHKERRQ(ierr);
  ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
  w    = (Vec_MPI*)(*vv)->data;
  /* Create local representation */
  ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,n+bs*nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = VecSetBlockSize(w->localrep,bs);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localrep);CHKERRQ(ierr);
  ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  ierr = ISCreateBlock(comm,bs,nghost,ghosts,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localupdate);CHKERRQ(ierr);
  ierr = ISDestroy(to);CHKERRQ(ierr);
  ierr = ISDestroy(from);CHKERRQ(ierr);

  /* set local to global mapping for ghosted vector */
  nb   = n/bs;
  ierr = PetscMalloc((nb+nghost)*sizeof(PetscInt),&indices);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(*vv,&rstart,PETSC_NULL);CHKERRQ(ierr);
  for (i=0; i<nb; i++) {
    indices[i] = rstart + i*bs;
  }
  for (i=0; i<nghost; i++) {
    indices[nb+i] = ghosts[i];
  }
  ierr = ISLocalToGlobalMappingCreate(comm,nb+nghost,indices,&ltog);CHKERRQ(ierr);
  ierr = PetscFree(indices);CHKERRQ(ierr);  /* the mapping copied indices; free them only once */
  ierr = VecSetLocalToGlobalMappingBlock(*vv,ltog);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(ltog);CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
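/*
   Illustrative sketch (not part of the library): block-indexed ghosts. With bs = 2,
   ghost block j occupies entries n + 2*j and n + 2*j + 1 of the local form. The sizes
   and the ghost-block choice are assumptions for the example.
*/
#if 0
static PetscErrorCode ExampleCreateGhostBlock(MPI_Comm comm,Vec *g)
{
  PetscErrorCode ierr;
  const PetscInt bs = 2,nlocal = 6,nghost = 1;
  PetscInt       ghosts[1] = {0};   /* ghost the block with global block index 0 */

  PetscFunctionBegin;
  ierr = VecCreateGhostBlock(comm,bs,nlocal,PETSC_DECIDE,nghost,ghosts,g);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#endif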
#undef __FUNCT__
#define __FUNCT__ "VecCreateGhostBlock"
/*@
   VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
        The indexing of the ghost points is done with blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - the block size
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost blocks
-  ghosts - global indices of ghost blocks

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (the total local size, not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostBlock(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*
    These introduce a ghosted vector where the ghosting is determined by the call to
  VecSetLocalToGlobalMapping()
*/

#undef __FUNCT__
#define __FUNCT__ "VecSetLocalToGlobalMapping_FETI"
PetscErrorCode VecSetLocalToGlobalMapping_FETI(Vec vv,ISLocalToGlobalMapping map)
{
  PetscErrorCode ierr;
  Vec_MPI        *v = (Vec_MPI*)vv->data;

  PetscFunctionBegin;
  v->nghost = map->n - vv->map->n;

  /* we need to enlarge the array space that was allocated when the vector was created */
  ierr = PetscFree(v->array_allocated);CHKERRQ(ierr);
  ierr = PetscMalloc(map->n*sizeof(PetscScalar),&v->array_allocated);CHKERRQ(ierr);
  v->array = v->array_allocated;

  /* Create local representation */
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,map->n,v->array,&v->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(vv,v->localrep);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecSetValuesLocal_FETI"
PetscErrorCode VecSetValuesLocal_FETI(Vec vv,PetscInt n,const PetscInt *ix,const PetscScalar *values,InsertMode mode)
{
  PetscErrorCode ierr;
  Vec_MPI        *v = (Vec_MPI*)vv->data;

  PetscFunctionBegin;
  ierr = VecSetValues(v->localrep,n,ix,values,mode);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "VecCreate_FETI"
PetscErrorCode PETSCVEC_DLLEXPORT VecCreate_FETI(Vec vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecSetType(vv,VECMPI);CHKERRQ(ierr);

  /* overwrite the functions to handle setting values locally */
  vv->ops->setlocaltoglobalmapping = VecSetLocalToGlobalMapping_FETI;
  vv->ops->setvalueslocal          = VecSetValuesLocal_FETI;
  vv->ops->assemblybegin           = 0;
  vv->ops->assemblyend             = 0;
  vv->ops->setvaluesblocked        = 0;

  PetscFunctionReturn(0);
}
EXTERN_C_END
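/*
   Illustrative sketch (not part of the library): how the FETI variant above would be
   used once registered as a vector type. The type name "feti" is an assumption. The
   constructor disables assembly and blocked insertion, so values go in only through
   the local representation via VecSetValuesLocal().
*/
#if 0
static PetscErrorCode ExampleUseFETI(MPI_Comm comm,ISLocalToGlobalMapping map)
{
  PetscErrorCode ierr;
  Vec            x;
  PetscInt       ix[1]  = {0};
  PetscScalar    val[1] = {1.0};

  PetscFunctionBegin;
  ierr = VecCreate(comm,&x);CHKERRQ(ierr);
  ierr = VecSetSizes(x,4,PETSC_DECIDE);CHKERRQ(ierr);
  ierr = VecSetType(x,"feti");CHKERRQ(ierr);                /* assumed registered name */
  ierr = VecSetLocalToGlobalMapping(x,map);CHKERRQ(ierr);   /* builds the local rep */
  ierr = VecSetValuesLocal(x,1,ix,val,INSERT_VALUES);CHKERRQ(ierr);
  ierr = VecDestroy(x);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#endif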