#define PETSCVEC_DLL
/*
   This file contains routines for parallel vector operations.
 */
#include "src/vec/vec/impls/mpi/pvecimpl.h"   /*I  "petscvec.h"   I*/

#if 0
#undef __FUNCT__
#define __FUNCT__ "VecPublish_MPI"
static PetscErrorCode VecPublish_MPI(PetscObject obj)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
#endif

#undef __FUNCT__
#define __FUNCT__ "VecDot_MPI"
PetscErrorCode VecDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecDot_Seq(xin,yin,&work);CHKERRQ(ierr);
  ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,PetscSum_Op,((PetscObject)xin)->comm);CHKERRQ(ierr);
  *z = sum;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecTDot_MPI"
PetscErrorCode VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecTDot_Seq(xin,yin,&work);CHKERRQ(ierr);
  ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,PetscSum_Op,((PetscObject)xin)->comm);CHKERRQ(ierr);
  *z = sum;
  PetscFunctionReturn(0);
}
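/*
   Note: both routines above use the standard reduction pattern for the MPI vector:
   apply the sequential kernel to the locally owned part, then combine the
   per-process partial results with MPI_Allreduce().  A caller-level sketch
   (hedged example; x and y are assumed to be assembled parallel vectors, and
   the call is collective over the vector's communicator):

.vb
       PetscScalar dot;
       ierr = VecDot(x,y,&dot);CHKERRQ(ierr);
.ve

   For a VECMPI vector this dispatches through the operations table below to
   VecDot_MPI().
*/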
#undef __FUNCT__
#define __FUNCT__ "VecSetOption_MPI"
PetscErrorCode VecSetOption_MPI(Vec v,VecOption op,PetscTruth flag)
{
  PetscFunctionBegin;
  if (op == VEC_IGNORE_OFF_PROC_ENTRIES) {
    v->stash.donotstash = flag;
  } else if (op == VEC_IGNORE_NEGATIVE_INDICES) {
    v->stash.ignorenegidx = flag;
  }
  PetscFunctionReturn(0);
}

EXTERN PetscErrorCode VecDuplicate_MPI(Vec,Vec *);
EXTERN_C_BEGIN
EXTERN PetscErrorCode VecView_MPI_Draw(Vec,PetscViewer);
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "VecPlaceArray_MPI"
PetscErrorCode VecPlaceArray_MPI(Vec vin,const PetscScalar *a)
{
  PetscErrorCode ierr;
  Vec_MPI        *v = (Vec_MPI*)vin->data;

  PetscFunctionBegin;
  if (v->unplacedarray) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
  v->unplacedarray = v->array;  /* save previous array so reset can bring it back */
  v->array         = (PetscScalar*)a;
  if (v->localrep) {
    ierr = VecPlaceArray(v->localrep,a);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecResetArray_MPI"
PetscErrorCode VecResetArray_MPI(Vec vin)
{
  Vec_MPI        *v = (Vec_MPI*)vin->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  v->array         = v->unplacedarray;
  v->unplacedarray = 0;
  if (v->localrep) {
    ierr = VecResetArray(v->localrep);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

EXTERN PetscErrorCode VecLoad_Binary(PetscViewer, const VecType, Vec*);
EXTERN PetscErrorCode VecGetValues_MPI(Vec,PetscInt,const PetscInt [],PetscScalar []);

static struct _VecOps DvOps = { VecDuplicate_MPI, /* 1 */
            VecDuplicateVecs_Default,
            VecDestroyVecs_Default,
            VecDot_MPI,
            VecMDot_MPI,
            VecNorm_MPI,
            VecTDot_MPI,
            VecMTDot_MPI,
            VecScale_Seq,
            VecCopy_Seq, /* 10 */
            VecSet_Seq,
            VecSwap_Seq,
            VecAXPY_Seq,
            VecAXPBY_Seq,
            VecMAXPY_Seq,
            VecAYPX_Seq,
            VecWAXPY_Seq,
            VecAXPBYPCZ_Seq,
            VecPointwiseMult_Seq,
            VecPointwiseDivide_Seq,
            VecSetValues_MPI, /* 20 */
            VecAssemblyBegin_MPI,
            VecAssemblyEnd_MPI,
            VecGetArray_Seq,
            VecGetSize_MPI,
            VecGetSize_Seq,
            VecRestoreArray_Seq,
            VecMax_MPI,
            VecMin_MPI,
            VecSetRandom_Seq,
            VecSetOption_MPI,
            VecSetValuesBlocked_MPI,
            VecDestroy_MPI,
            VecView_MPI,
            VecPlaceArray_MPI,
            VecReplaceArray_Seq,
            VecDot_Seq,
            VecTDot_Seq,
            VecNorm_Seq,
            VecMDot_Seq,
            VecMTDot_Seq,
            VecLoadIntoVector_Default,
            VecReciprocal_Default,
            0, /* VecViewNative... */
            VecConjugate_Seq,
            0,
            0,
            VecResetArray_MPI,
            0,
            VecMaxPointwiseDivide_Seq,
            VecLoad_Binary,
            VecPointwiseMax_Seq,
            VecPointwiseMaxAbs_Seq,
            VecPointwiseMin_Seq,
            VecGetValues_MPI};

#undef __FUNCT__
#define __FUNCT__ "VecCreate_MPI_Private"
/*
   VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
   VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
   VecDuplicate_MPI(), VecCreateGhostWithArray(), and VecDuplicate_Shared()

   If alloc is true and array is PETSC_NULL then this routine allocates the space, otherwise
   no space is allocated.
*/
PetscErrorCode VecCreate_MPI_Private(Vec v,PetscTruth alloc,PetscInt nghost,const PetscScalar array[])
{
  Vec_MPI        *s;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscNewLog(v,Vec_MPI,&s);CHKERRQ(ierr);
  v->data        = (void*)s;
  ierr = PetscMemcpy(v->ops,&DvOps,sizeof(DvOps));CHKERRQ(ierr);
  s->nghost      = nghost;
  v->mapping     = 0;
  v->bmapping    = 0;
  v->petscnative = PETSC_TRUE;

  if (v->map.bs == -1) v->map.bs = 1;
  ierr = PetscMapSetUp(&v->map);CHKERRQ(ierr);
  s->array           = (PetscScalar*)array;
  s->array_allocated = 0;
  if (alloc && !array) {  /* allocate only when the caller has not supplied storage */
    PetscInt n = v->map.n+nghost;
    ierr = PetscMalloc(n*sizeof(PetscScalar),&s->array);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory(v,n*sizeof(PetscScalar));CHKERRQ(ierr);
    ierr = PetscMemzero(s->array,v->map.n*sizeof(PetscScalar));CHKERRQ(ierr);
    s->array_allocated = s->array;
  }

  /* By default parallel vectors do not have a local representation */
  s->localrep    = 0;
  s->localupdate = 0;

  v->stash.insertmode = NOT_SET_VALUES;
  /* create the stashes. The block size for the bstash is set later when
     VecSetValuesBlocked() is called.
  */
  ierr = VecStashCreate_Private(((PetscObject)v)->comm,1,&v->stash);CHKERRQ(ierr);
  ierr = VecStashCreate_Private(((PetscObject)v)->comm,v->map.bs,&v->bstash);CHKERRQ(ierr);

#if defined(PETSC_HAVE_MATLAB_ENGINE)
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEnginePut_C","VecMatlabEnginePut_Default",VecMatlabEnginePut_Default);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEngineGet_C","VecMatlabEngineGet_Default",VecMatlabEngineGet_Default);CHKERRQ(ierr);
#endif
  ierr = PetscObjectChangeTypeName((PetscObject)v,VECMPI);CHKERRQ(ierr);
  ierr = PetscPublishAll(v);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*MC
   VECMPI - VECMPI = "mpi" - The basic parallel vector

   Options Database Keys:
.  -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions()

   Level: beginner

.seealso: VecCreate(), VecSetType(), VecSetFromOptions(), VecCreateMPIWithArray(), VecType, VecCreateMPI()
M*/
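/*
   A minimal creation sketch for a VECMPI vector using only the public API (the
   sizes here are illustrative; VecSetFromOptions() may be used in place of
   VecSetType() so that -vec_type mpi takes effect):

.vb
       Vec v;
       ierr = VecCreate(PETSC_COMM_WORLD,&v);CHKERRQ(ierr);
       ierr = VecSetSizes(v,PETSC_DECIDE,100);CHKERRQ(ierr);
       ierr = VecSetType(v,VECMPI);CHKERRQ(ierr);
       ...
       ierr = VecDestroy(v);CHKERRQ(ierr);
.ve
*/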
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "VecCreate_MPI"
PetscErrorCode PETSCVEC_DLLEXPORT VecCreate_MPI(Vec vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "VecCreateMPIWithArray"
/*@C
   VecCreateMPIWithArray - Creates a parallel, array-style vector,
   where the user provides the array space to store the vector values.

   Collective on MPI_Comm

   Input Parameters:
+  comm  - the MPI communicator to use
.  n     - local vector length, cannot be PETSC_DECIDE
.  N     - global vector length (or PETSC_DECIDE to have it calculated)
-  array - the user-provided array to store the vector values

   Output Parameter:
.  vv - the vector

   Notes:
   Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
   same type as an existing vector.

   If the user-provided array is PETSC_NULL, then VecPlaceArray() can be used
   at a later stage to SET the array for storing the vector values.

   PETSc does NOT free the array when the vector is destroyed via VecDestroy().
   The user should not free the array until the vector is destroyed.

   Level: intermediate

   Concepts: vectors^creating with array

.seealso: VecCreateSeqWithArray(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(),
          VecCreateMPI(), VecCreateGhostWithArray(), VecPlaceArray()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecCreateMPIWithArray(MPI_Comm comm,PetscInt n,PetscInt N,const PetscScalar array[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (n == PETSC_DECIDE) {
    SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
  }
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_FALSE,0,array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
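/*
   Usage sketch for VecCreateMPIWithArray() (hedged example; n is a caller-chosen
   local length and the storage must outlive the vector, since VecDestroy() will
   not free it):

.vb
       PetscScalar *array;
       Vec         v;
       ierr = PetscMalloc(n*sizeof(PetscScalar),&array);CHKERRQ(ierr);
       ierr = VecCreateMPIWithArray(PETSC_COMM_WORLD,n,PETSC_DECIDE,array,&v);CHKERRQ(ierr);
       ...
       ierr = VecDestroy(v);CHKERRQ(ierr);
       ierr = PetscFree(array);CHKERRQ(ierr);
.ve
*/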
#undef __FUNCT__
#define __FUNCT__ "VecGhostGetLocalForm"
/*@
    VecGhostGetLocalForm - Obtains the local ghosted representation of
    a parallel vector created with VecCreateGhost().

    Not Collective

    Input Parameter:
.   g - the global vector. The vector must have been obtained with either
        VecCreateGhost(), VecCreateGhostWithArray() or VecCreateSeq().

    Output Parameter:
.   l - the local (ghosted) representation

    Notes:
    This routine does not actually update the ghost values, but rather it
    returns a sequential vector that includes the locations for the ghost
    values and their current values. The returned vector and the original
    vector passed in share the same array that contains the actual vector data.

    One should call VecGhostRestoreLocalForm() or VecDestroy() once one is
    finished using the object.

    Level: advanced

   Concepts: vectors^ghost point access

.seealso: VecCreateGhost(), VecGhostRestoreLocalForm(), VecCreateGhostWithArray()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecGhostGetLocalForm(Vec g,Vec *l)
{
  PetscErrorCode ierr;
  PetscTruth     isseq,ismpi;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(g,VEC_COOKIE,1);
  PetscValidPointer(l,2);

  ierr = PetscTypeCompare((PetscObject)g,VECSEQ,&isseq);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)g,VECMPI,&ismpi);CHKERRQ(ierr);
  if (ismpi) {
    Vec_MPI *v = (Vec_MPI*)g->data;
    if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
    *l = v->localrep;
  } else if (isseq) {
    *l = g;
  } else {
    SETERRQ1(PETSC_ERR_ARG_WRONG,"Vector type %s does not have local representation",((PetscObject)g)->type_name);
  }
  ierr = PetscObjectReference((PetscObject)*l);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecGhostRestoreLocalForm"
/*@
    VecGhostRestoreLocalForm - Restores the local ghosted representation of
    a parallel vector obtained with VecGhostGetLocalForm().

    Not Collective

    Input Parameters:
+   g - the global vector
-   l - the local (ghosted) representation

    Notes:
    This routine does not update the ghost values; it simply releases the
    reference to the local form acquired with VecGhostGetLocalForm().

    Level: advanced

.seealso: VecCreateGhost(), VecGhostGetLocalForm(), VecCreateGhostWithArray()
@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecGhostRestoreLocalForm(Vec g,Vec *l)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectDereference((PetscObject)*l);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
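/*
   Sketch of the get/restore pairing (hedged example; g is assumed to come from
   VecCreateGhost() with local length n and nghost ghost points, so the local
   form has length n+nghost):

.vb
       Vec      lv;
       PetscInt nlocal;
       ierr = VecGhostGetLocalForm(g,&lv);CHKERRQ(ierr);
       ierr = VecGetSize(lv,&nlocal);CHKERRQ(ierr);
       ierr = VecGhostRestoreLocalForm(g,&lv);CHKERRQ(ierr);
.ve
*/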
#undef __FUNCT__
#define __FUNCT__ "VecGhostUpdateBegin"
/*@
   VecGhostUpdateBegin - Begins the vector scatter that updates the vector from
   the local representation to the global representation, or vice versa.

   Collective on Vec

   Input Parameters:
+  g - the vector (obtained with VecCreateGhost() or VecDuplicate())
.  insertmode - one of ADD_VALUES or INSERT_VALUES
-  scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE

   Notes:
   Use the following to update the ghost regions with correct values from the owning process
.vb
       VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
       VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
.ve

   Use the following to accumulate the ghost region values onto the owning processors
.vb
       VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
.ve

   To accumulate the ghost region values onto the owning processors and then update
   the ghost regions correctly, call the latter followed by the former, i.e.,
.vb
       VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
       VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
.ve

   Level: advanced

.seealso: VecCreateGhost(), VecGhostUpdateEnd(), VecGhostGetLocalForm(),
          VecGhostRestoreLocalForm(),VecCreateGhostWithArray()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecGhostUpdateBegin(Vec g,InsertMode insertmode,ScatterMode scattermode)
{
  Vec_MPI        *v;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(g,VEC_COOKIE,1);

  v = (Vec_MPI*)g->data;
  if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
  if (!v->localupdate) PetscFunctionReturn(0);

  if (scattermode == SCATTER_REVERSE) {
    ierr = VecScatterBegin(v->localupdate,v->localrep,g,insertmode,scattermode);CHKERRQ(ierr);
  } else {
    ierr = VecScatterBegin(v->localupdate,g,v->localrep,insertmode,scattermode);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecGhostUpdateEnd"
/*@
   VecGhostUpdateEnd - Ends the vector scatter that updates the vector from
   the local representation to the global representation, or vice versa.

   Collective on Vec

   Input Parameters:
+  g - the vector (obtained with VecCreateGhost() or VecDuplicate())
.  insertmode - one of ADD_VALUES or INSERT_VALUES
-  scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE

   Notes:
   Use the following to update the ghost regions with correct values from the owning process
.vb
       VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
       VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
.ve

   Use the following to accumulate the ghost region values onto the owning processors
.vb
       VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
.ve

   To accumulate the ghost region values onto the owning processors and then update
   the ghost regions correctly, call the latter followed by the former, i.e.,
.vb
       VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
       VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
.ve

   Level: advanced

.seealso: VecCreateGhost(), VecGhostUpdateBegin(), VecGhostGetLocalForm(),
          VecGhostRestoreLocalForm(),VecCreateGhostWithArray()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecGhostUpdateEnd(Vec g,InsertMode insertmode,ScatterMode scattermode)
{
  Vec_MPI        *v;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(g,VEC_COOKIE,1);

  v = (Vec_MPI*)g->data;
  if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
  if (!v->localupdate) PetscFunctionReturn(0);

  if (scattermode == SCATTER_REVERSE) {
    ierr = VecScatterEnd(v->localupdate,v->localrep,g,insertmode,scattermode);CHKERRQ(ierr);
  } else {
    ierr = VecScatterEnd(v->localupdate,g,v->localrep,insertmode,scattermode);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
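/*
   Putting the two halves together: after a forward scatter completes, the last
   nghost entries of the local form hold current values from the owning
   processes.  A hedged sketch (g, lv and n as in the examples above; a is a
   PetscScalar pointer):

.vb
       ierr = VecGhostUpdateBegin(g,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
       ierr = VecGhostUpdateEnd(g,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
       ierr = VecGhostGetLocalForm(g,&lv);CHKERRQ(ierr);
       ierr = VecGetArray(lv,&a);CHKERRQ(ierr);
       ierr = VecRestoreArray(lv,&a);CHKERRQ(ierr);
       ierr = VecGhostRestoreLocalForm(g,&lv);CHKERRQ(ierr);
.ve

   Here a[n] through a[n+nghost-1] are the ghost values.
*/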
#undef __FUNCT__
#define __FUNCT__ "VecCreateGhostWithArray"
/*@C
   VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space.

   Collective on MPI_Comm

   Input Parameters:
+  comm   - the MPI communicator to use
.  n      - local vector length
.  N      - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost points
.  ghosts - global indices of ghost points (or PETSC_NULL if not needed)
-  array  - the space to store the vector values (of length n + nghost)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   Level: advanced

   Concepts: vectors^creating with array
   Concepts: vectors^ghosted

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostWithArray(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode         ierr;
  Vec_MPI                *w;
  PetscScalar            *larray;
  IS                     from,to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart,i,*indices;

  PetscFunctionBegin;
  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost,array);CHKERRQ(ierr);
  w    = (Vec_MPI*)(*vv)->data;
  /* Create local representation */
  ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localrep);CHKERRQ(ierr);
  ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  ierr = ISCreateGeneral(comm,nghost,ghosts,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localupdate);CHKERRQ(ierr);
  ierr = ISDestroy(to);CHKERRQ(ierr);
  ierr = ISDestroy(from);CHKERRQ(ierr);

  /* set local to global mapping for ghosted vector */
  ierr = PetscMalloc((n+nghost)*sizeof(PetscInt),&indices);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(*vv,&rstart,PETSC_NULL);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    indices[i] = rstart + i;
  }
  for (i=0; i<nghost; i++) {
    indices[n+i] = ghosts[i];
  }
  ierr = ISLocalToGlobalMappingCreate(comm,n+nghost,indices,&ltog);CHKERRQ(ierr);
  ierr = PetscFree(indices);CHKERRQ(ierr);  /* the mapping keeps its own copy; free exactly once */
  ierr = VecSetLocalToGlobalMapping(*vv,ltog);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(ltog);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
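/*
   Hedged usage sketch: the caller supplies storage of length n+nghost; the first
   n entries hold the locally owned values and the trailing nghost entries are
   the ghost slots filled by the scatter created above:

.vb
       PetscScalar *space;
       Vec         g;
       ierr = PetscMalloc((n+nghost)*sizeof(PetscScalar),&space);CHKERRQ(ierr);
       ierr = VecCreateGhostWithArray(PETSC_COMM_WORLD,n,PETSC_DECIDE,nghost,ghosts,space,&g);CHKERRQ(ierr);
.ve
*/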
#undef __FUNCT__
#define __FUNCT__ "VecCreateGhost"
/*@
   VecCreateGhost - Creates a parallel vector with ghost padding on each processor.

   Collective on MPI_Comm

   Input Parameters:
+  comm   - the MPI communicator to use
.  n      - local vector length
.  N      - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhost(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreateGhostWithArray(comm,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
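/*
   End-to-end sketch for a ghosted vector (hedged example; nghost and ghosts[]
   would normally come from the mesh or matrix partitioning, and m, idx, vals
   are caller-provided values to insert):

.vb
       Vec g;
       ierr = VecCreateGhost(PETSC_COMM_WORLD,n,PETSC_DECIDE,nghost,ghosts,&g);CHKERRQ(ierr);
       ierr = VecSetValues(g,m,idx,vals,INSERT_VALUES);CHKERRQ(ierr);
       ierr = VecAssemblyBegin(g);CHKERRQ(ierr);
       ierr = VecAssemblyEnd(g);CHKERRQ(ierr);
       ierr = VecGhostUpdateBegin(g,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
       ierr = VecGhostUpdateEnd(g,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
.ve
*/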
#undef __FUNCT__
#define __FUNCT__ "VecDuplicate_MPI"
PetscErrorCode VecDuplicate_MPI(Vec win,Vec *v)
{
  PetscErrorCode ierr;
  Vec_MPI        *vw,*w = (Vec_MPI*)win->data;
  PetscScalar    *array;

  PetscFunctionBegin;
  ierr = VecCreate(((PetscObject)win)->comm,v);CHKERRQ(ierr);
  ierr = VecSetSizes(*v,win->map.n,win->map.N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*v,PETSC_TRUE,w->nghost,0);CHKERRQ(ierr);
  vw   = (Vec_MPI*)(*v)->data;
  ierr = PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps));CHKERRQ(ierr);

  /* save local representation of the parallel vector (and scatter) if it exists */
  if (w->localrep) {
    ierr = VecGetArray(*v,&array);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,win->map.n+w->nghost,array,&vw->localrep);CHKERRQ(ierr);
    ierr = PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps));CHKERRQ(ierr);
    ierr = VecRestoreArray(*v,&array);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(*v,vw->localrep);CHKERRQ(ierr);
    vw->localupdate = w->localupdate;
    if (vw->localupdate) {
      ierr = PetscObjectReference((PetscObject)vw->localupdate);CHKERRQ(ierr);
    }
  }

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash   = win->stash.donotstash;
  (*v)->stash.ignorenegidx = win->stash.ignorenegidx;

  ierr = PetscOListDuplicate(((PetscObject)win)->olist,&((PetscObject)(*v))->olist);CHKERRQ(ierr);
  ierr = PetscFListDuplicate(((PetscObject)win)->qlist,&((PetscObject)(*v))->qlist);CHKERRQ(ierr);
  if (win->mapping) {
    ierr = PetscObjectReference((PetscObject)win->mapping);CHKERRQ(ierr);
    (*v)->mapping = win->mapping;
  }
  if (win->bmapping) {
    ierr = PetscObjectReference((PetscObject)win->bmapping);CHKERRQ(ierr);
    (*v)->bmapping = win->bmapping;
  }
  (*v)->map.bs    = win->map.bs;
  (*v)->bstash.bs = win->bstash.bs;
  PetscFunctionReturn(0);
}
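/*
   Note that VecDuplicate() of a ghosted vector yields another ghosted vector:
   the code above rebuilds the local representation around the new array and
   shares (by reference counting) the parent's ghost-update scatter.  A hedged
   sketch:

.vb
       Vec g2,lv;
       ierr = VecDuplicate(g,&g2);CHKERRQ(ierr);
       ierr = VecGhostGetLocalForm(g2,&lv);CHKERRQ(ierr);
       ierr = VecGhostRestoreLocalForm(g2,&lv);CHKERRQ(ierr);
.ve
*/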
/* ------------------------------------------------------------------------------------------*/
#undef __FUNCT__
#define __FUNCT__ "VecCreateGhostBlockWithArray"
/*@C
   VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space. Indices in the ghost region are based on blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm   - the MPI communicator to use
.  bs     - block size
.  n      - local vector length
.  N      - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost blocks
.  ghosts - global indices of ghost blocks (or PETSC_NULL if not needed)
-  array  - the space to store the vector values (of length n + nghost*bs)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (the total local size, not the number of blocks), while
   nghost is the number of blocks in the ghost portion, i.e. the number of elements
   in the ghost portion is bs*nghost.

   Level: advanced

   Concepts: vectors^creating ghosted
   Concepts: vectors^creating with array

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostWithArray(), VecCreateGhostBlock()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostBlockWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode ierr;
  Vec_MPI        *w;
  PetscScalar    *larray;
  IS             from,to;

  PetscFunctionBegin;
  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_FALSE,nghost*bs,array);CHKERRQ(ierr);
  ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
  w    = (Vec_MPI*)(*vv)->data;
  /* Create local representation */
  ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,n+bs*nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = VecSetBlockSize(w->localrep,bs);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localrep);CHKERRQ(ierr);
  ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  ierr = ISCreateBlock(comm,bs,nghost,ghosts,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localupdate);CHKERRQ(ierr);
  ierr = ISDestroy(to);CHKERRQ(ierr);
  ierr = ISDestroy(from);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecCreateGhostBlock"
/*@
   VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
   The indexing of the ghost points is done with blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm   - the MPI communicator to use
.  bs     - the block size
.  n      - local vector length
.  N      - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost blocks
-  ghosts - global indices of ghost blocks

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (the total local size, not the number of blocks), while
   nghost is the number of blocks in the ghost portion, i.e. the number of elements
   in the ghost portion is bs*nghost.

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostBlock(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
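/*
   Hedged block-ghost sketch: with bs = 2 and nghost = 3, ghosts[] holds 3 global
   BLOCK indices (filled in by the caller) and the local form carries n + 6
   scalar entries.  Note that n is measured in scalars and must be divisible by bs:

.vb
       PetscInt ghosts[3];
       Vec      g;
       ierr = VecCreateGhostBlock(PETSC_COMM_WORLD,2,n,PETSC_DECIDE,3,ghosts,&g);CHKERRQ(ierr);
.ve
*/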
/*
   These introduce a ghosted vector where the ghosting is determined by the call to
   VecSetLocalToGlobalMapping()
*/

#undef __FUNCT__
#define __FUNCT__ "VecSetLocalToGlobalMapping_FETI"
PetscErrorCode VecSetLocalToGlobalMapping_FETI(Vec vv,ISLocalToGlobalMapping map)
{
  PetscErrorCode ierr;
  Vec_MPI        *v = (Vec_MPI*)vv->data;

  PetscFunctionBegin;
  v->nghost = map->n - vv->map.n;

  /* we need to enlarge the array space that was allocated when the vector was created */
  ierr = PetscFree(v->array_allocated);CHKERRQ(ierr);
  ierr = PetscMalloc(map->n*sizeof(PetscScalar),&v->array_allocated);CHKERRQ(ierr);
  v->array = v->array_allocated;

  /* Create local representation */
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,map->n,v->array,&v->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(vv,v->localrep);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecSetValuesLocal_FETI"
PetscErrorCode VecSetValuesLocal_FETI(Vec vv,PetscInt n,const PetscInt *ix,const PetscScalar *values,InsertMode mode)
{
  PetscErrorCode ierr;
  Vec_MPI        *v = (Vec_MPI*)vv->data;

  PetscFunctionBegin;
  ierr = VecSetValues(v->localrep,n,ix,values,mode);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "VecCreate_FETI"
PetscErrorCode PETSCVEC_DLLEXPORT VecCreate_FETI(Vec vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecSetType(vv,VECMPI);CHKERRQ(ierr);

  /* overwrite the functions to handle setting values locally */
  vv->ops->setlocaltoglobalmapping = VecSetLocalToGlobalMapping_FETI;
  vv->ops->setvalueslocal          = VecSetValuesLocal_FETI;
  vv->ops->assemblybegin           = 0;
  vv->ops->assemblyend             = 0;
  vv->ops->setvaluesblocked        = 0;
  PetscFunctionReturn(0);
}
EXTERN_C_END
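/*
   Hedged usage sketch for the FETI variant.  It assumes the type has been
   registered under a name such as "feti" (e.g. by the vector registration
   routines elsewhere in the library); the supplied mapping determines the size
   of the local (ghosted) representation:

.vb
       Vec v;
       ierr = VecCreate(PETSC_COMM_WORLD,&v);CHKERRQ(ierr);
       ierr = VecSetSizes(v,n,PETSC_DECIDE);CHKERRQ(ierr);
       ierr = VecSetType(v,"feti");CHKERRQ(ierr);
       ierr = VecSetLocalToGlobalMapping(v,map);CHKERRQ(ierr);
       ierr = VecSetValuesLocal(v,m,idx,vals,INSERT_VALUES);CHKERRQ(ierr);
.ve
*/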