/*
     This file contains routines for parallel vector operations.
 */
#include <../src/vec/vec/impls/mpi/pvecimpl.h>   /*I  "petscvec.h"   I*/

#undef __FUNCT__
#define __FUNCT__ "VecDot_MPI"
PetscErrorCode VecDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* compute the local dot product, then sum the partial results across all processes */
  ierr = VecDot_Seq(xin,yin,&work);CHKERRQ(ierr);
  ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,((PetscObject)xin)->comm);CHKERRQ(ierr);
  *z   = sum;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecTDot_MPI"
PetscErrorCode VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* same pattern as VecDot_MPI but for the indefinite (non-conjugated) dot product */
  ierr = VecTDot_Seq(xin,yin,&work);CHKERRQ(ierr);
  ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,((PetscObject)xin)->comm);CHKERRQ(ierr);
  *z   = sum;
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
extern PetscErrorCode VecView_MPI_Draw(Vec,PetscViewer);
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "VecPlaceArray_MPI"
PetscErrorCode VecPlaceArray_MPI(Vec vin,const PetscScalar *a)
{
  PetscErrorCode ierr;
  Vec_MPI        *v = (Vec_MPI *)vin->data;

  PetscFunctionBegin;
  if (v->unplacedarray) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
  v->unplacedarray = v->array;  /* save previous array so reset can bring it back */
  v->array         = (PetscScalar *)a;
  if (v->localrep) {
    ierr = VecPlaceArray(v->localrep,a);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

extern PetscErrorCode VecGetValues_MPI(Vec,PetscInt,const PetscInt [],PetscScalar []);

#undef __FUNCT__
#define __FUNCT__ "VecDuplicate_MPI"
static PetscErrorCode VecDuplicate_MPI(Vec win,Vec *v)
{
  PetscErrorCode ierr;
  Vec_MPI        *vw,*w = (Vec_MPI *)win->data;
  PetscScalar    *array;

  PetscFunctionBegin;
  ierr = VecCreate(((PetscObject)win)->comm,v);CHKERRQ(ierr);
  ierr = PetscLayoutReference(win->map,&(*v)->map);CHKERRQ(ierr);

  ierr = VecCreate_MPI_Private(*v,PETSC_TRUE,w->nghost,0);CHKERRQ(ierr);
  vw   = (Vec_MPI *)(*v)->data;
  ierr = PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps));CHKERRQ(ierr);

  /* save local representation of the parallel vector (and scatter) if it exists */
  if (w->localrep) {
    ierr = VecGetArray(*v,&array);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,win->map->bs,win->map->n+w->nghost,array,&vw->localrep);CHKERRQ(ierr);
    ierr = PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps));CHKERRQ(ierr);
    ierr = VecRestoreArray(*v,&array);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(*v,vw->localrep);CHKERRQ(ierr);

    vw->localupdate = w->localupdate;
    if (vw->localupdate) {
      ierr = PetscObjectReference((PetscObject)vw->localupdate);CHKERRQ(ierr);
    }
  }

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash   = win->stash.donotstash;
  (*v)->stash.ignorenegidx = win->stash.ignorenegidx;

  ierr = PetscObjectListDuplicate(((PetscObject)win)->olist,&((PetscObject)(*v))->olist);CHKERRQ(ierr);
  ierr = PetscFunctionListDuplicate(((PetscObject)win)->qlist,&((PetscObject)(*v))->qlist);CHKERRQ(ierr);

  (*v)->map->bs   = win->map->bs;
  (*v)->bstash.bs = win->bstash.bs;
  PetscFunctionReturn(0);
}

extern PetscErrorCode VecSetOption_MPI(Vec,VecOption,PetscBool);
extern PetscErrorCode VecResetArray_MPI(Vec);
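/*
   The function table below supplies the kernels for this vector type.
   Purely local operations (scale, copy, axpy, pointwise products, ...)
   reuse the sequential Seq kernels unchanged, while operations that
   require communication (dot products, norms, max/min, assembly, view)
   have MPI-specific versions; a 0 entry means the operation is not
   provided by this implementation.
*/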
static struct _VecOps DvOps = { VecDuplicate_MPI, /* 1 */
            VecDuplicateVecs_Default,
            VecDestroyVecs_Default,
            VecDot_MPI,
            VecMDot_MPI,
            VecNorm_MPI,
            VecTDot_MPI,
            VecMTDot_MPI,
            VecScale_Seq,
            VecCopy_Seq, /* 10 */
            VecSet_Seq,
            VecSwap_Seq,
            VecAXPY_Seq,
            VecAXPBY_Seq,
            VecMAXPY_Seq,
            VecAYPX_Seq,
            VecWAXPY_Seq,
            VecAXPBYPCZ_Seq,
            VecPointwiseMult_Seq,
            VecPointwiseDivide_Seq,
            VecSetValues_MPI, /* 20 */
            VecAssemblyBegin_MPI,
            VecAssemblyEnd_MPI,
            0,
            VecGetSize_MPI,
            VecGetSize_Seq,
            0,
            VecMax_MPI,
            VecMin_MPI,
            VecSetRandom_Seq,
            VecSetOption_MPI,
            VecSetValuesBlocked_MPI,
            VecDestroy_MPI,
            VecView_MPI,
            VecPlaceArray_MPI,
            VecReplaceArray_Seq,
            VecDot_Seq,
            VecTDot_Seq,
            VecNorm_Seq,
            VecMDot_Seq,
            VecMTDot_Seq,
            VecLoad_Default,
            VecReciprocal_Default,
            VecConjugate_Seq,
            0,
            0,
            VecResetArray_MPI,
            0,
            VecMaxPointwiseDivide_Seq,
            VecPointwiseMax_Seq,
            VecPointwiseMaxAbs_Seq,
            VecPointwiseMin_Seq,
            VecGetValues_MPI,
            0,
            0,
            0,
            0,
            0,
            0,
            VecStrideGather_Default,
            VecStrideScatter_Default
};
#undef __FUNCT__
#define __FUNCT__ "VecCreate_MPI_Private"
/*
    VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
    VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
    VecCreateGhostWithArray(), VecDuplicate_MPI(), and VecDuplicate_Shared()

    If alloc is true and array is PETSC_NULL then this routine allocates the space, otherwise
    no space is allocated.
*/
PetscErrorCode VecCreate_MPI_Private(Vec v,PetscBool alloc,PetscInt nghost,const PetscScalar array[])
{
  Vec_MPI        *s;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr           = PetscNewLog(v,Vec_MPI,&s);CHKERRQ(ierr);
  v->data        = (void*)s;
  ierr           = PetscMemcpy(v->ops,&DvOps,sizeof(DvOps));CHKERRQ(ierr);
  s->nghost      = nghost;
  v->petscnative = PETSC_TRUE;

  ierr = PetscLayoutSetUp(v->map);CHKERRQ(ierr);
  s->array           = (PetscScalar *)array;
  s->array_allocated = 0;
  if (alloc && !array) {
    PetscInt n = v->map->n+nghost;
    ierr = PetscMalloc(n*sizeof(PetscScalar),&s->array);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory(v,n*sizeof(PetscScalar));CHKERRQ(ierr);
    ierr = PetscMemzero(s->array,v->map->n*sizeof(PetscScalar));CHKERRQ(ierr);
    s->array_allocated = s->array;
  }

  /* By default parallel vectors do not have local representation */
  s->localrep    = 0;
  s->localupdate = 0;

  v->stash.insertmode = NOT_SET_VALUES;
  /* create the stashes. The block-size for bstash is set later when
     VecSetValuesBlocked is called.
  */
  ierr = VecStashCreate_Private(((PetscObject)v)->comm,1,&v->stash);CHKERRQ(ierr);
  ierr = VecStashCreate_Private(((PetscObject)v)->comm,v->map->bs,&v->bstash);CHKERRQ(ierr);

#if defined(PETSC_HAVE_MATLAB_ENGINE)
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEnginePut_C","VecMatlabEnginePut_Default",VecMatlabEnginePut_Default);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEngineGet_C","VecMatlabEngineGet_Default",VecMatlabEngineGet_Default);CHKERRQ(ierr);
#endif
  ierr = PetscObjectChangeTypeName((PetscObject)v,VECMPI);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*MC
   VECMPI - VECMPI = "mpi" - The basic parallel vector

   Options Database Keys:
. -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions()

  Level: beginner

.seealso: VecCreate(), VecSetType(), VecSetFromOptions(), VecCreateMPIWithArray(), VECMPI, VecType, VecCreateMPI()
M*/

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "VecCreate_MPI"
PetscErrorCode  VecCreate_MPI(Vec vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

/*MC
   VECSTANDARD = "standard" - A VECSEQ on one process and VECMPI on more than one process

   Options Database Keys:
. -vec_type standard - sets the vector type to VECSTANDARD during a call to VecSetFromOptions()

  Level: beginner

.seealso: VecCreateSeq(), VecCreateMPI()
M*/

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "VecCreate_Standard"
PetscErrorCode  VecCreate_Standard(Vec v)
{
  PetscErrorCode ierr;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(((PetscObject)v)->comm,&size);CHKERRQ(ierr);
  if (size == 1) {
    ierr = VecSetType(v,VECSEQ);CHKERRQ(ierr);
  } else {
    ierr = VecSetType(v,VECMPI);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
EXTERN_C_END
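/*
   Usage sketch (illustrative, not part of the library): letting the vector
   type be chosen at runtime, so -vec_type mpi or -vec_type standard take
   effect.  Error checking is elided.

      Vec v;
      VecCreate(PETSC_COMM_WORLD,&v);
      VecSetSizes(v,PETSC_DECIDE,100);
      VecSetFromOptions(v);       // VECSEQ on one process, VECMPI on several
      ...
      VecDestroy(&v);
*/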
#undef __FUNCT__
#define __FUNCT__ "VecCreateMPIWithArray"
/*@C
   VecCreateMPIWithArray - Creates a parallel, array-style vector,
   where the user provides the array space to store the vector values.

   Collective on MPI_Comm

   Input Parameters:
+  comm  - the MPI communicator to use
.  bs    - block size, same meaning as VecSetBlockSize()
.  n     - local vector length, cannot be PETSC_DECIDE
.  N     - global vector length (or PETSC_DECIDE to have it calculated)
-  array - the user provided array to store the vector values

   Output Parameter:
.  vv - the vector

   Notes:
   Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
   same type as an existing vector.

   If the user-provided array is PETSC_NULL, then VecPlaceArray() can be used
   at a later stage to SET the array for storing the vector values.

   PETSc does NOT free the array when the vector is destroyed via VecDestroy().
   The user should not free the array until the vector is destroyed.

   Level: intermediate

   Concepts: vectors^creating with array

.seealso: VecCreateSeqWithArray(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(),
          VecCreateMPI(), VecCreateGhostWithArray(), VecPlaceArray()

@*/
PetscErrorCode  VecCreateMPIWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,const PetscScalar array[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (n == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_FALSE,0,array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
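/*
   Usage sketch (illustrative only): each process supplies storage for its
   five local entries; the array must outlive the vector.  Error checking
   is elided.

      PetscScalar array[5];
      Vec         v;
      VecCreateMPIWithArray(PETSC_COMM_WORLD,1,5,PETSC_DECIDE,array,&v);
      ...
      VecDestroy(&v);             // does NOT free array
*/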
#undef __FUNCT__
#define __FUNCT__ "VecCreateGhostWithArray"
/*@C
   VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space.

   Collective on MPI_Comm

   Input Parameters:
+  comm   - the MPI communicator to use
.  n      - local vector length
.  N      - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost points
.  ghosts - global indices of ghost points (or PETSC_NULL if not needed), these do not need to be in increasing (sorted) order
-  array  - the space to store the vector values (as long as n + nghost)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   Level: advanced

   Concepts: vectors^creating with array
   Concepts: vectors^ghosted

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray(), VecMPISetGhost()

@*/
PetscErrorCode  VecCreateGhostWithArray(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode         ierr;
  Vec_MPI                *w;
  PetscScalar            *larray;
  IS                     from,to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart,i,*indices;

  PetscFunctionBegin;
  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost,array);CHKERRQ(ierr);
  w    = (Vec_MPI *)(*vv)->data;
  /* Create local representation */
  ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localrep);CHKERRQ(ierr);
  ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  ierr = ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localupdate);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);

  /* set local to global mapping for ghosted vector */
  ierr = PetscMalloc((n+nghost)*sizeof(PetscInt),&indices);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(*vv,&rstart,PETSC_NULL);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    indices[i] = rstart + i;
  }
  for (i=0; i<nghost; i++) {
    indices[n+i] = ghosts[i];
  }
  ierr = ISLocalToGlobalMappingCreate(comm,n+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
  ierr = VecSetLocalToGlobalMapping(*vv,ltog);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
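/*
   Usage sketch (illustrative only): reading owned and ghost entries through
   the local, ghosted form of a vector v created by one of the ghost
   constructors.  Error checking is elided.

      Vec lv;
      VecGhostGetLocalForm(v,&lv);     // sequential view of length n+nghost
      ...                              // entries [0,n) owned, [n,n+nghost) ghosts
      VecGhostRestoreLocalForm(v,&lv);
*/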
#undef __FUNCT__
#define __FUNCT__ "VecCreateGhost"
/*@
   VecCreateGhost - Creates a parallel vector with ghost padding on each processor.

   Collective on MPI_Comm

   Input Parameters:
+  comm   - the MPI communicator to use
.  n      - local vector length
.  N      - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points, these do not need to be in increasing (sorted) order

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray(), VecMPISetGhost()

@*/
PetscErrorCode  VecCreateGhost(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreateGhostWithArray(comm,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
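/*
   Usage sketch (illustrative only; the sizes and the ghost index are made
   up): each process owns two entries and mirrors one entry owned elsewhere;
   ghost values are refreshed with a forward scatter.  Error checking is
   elided.

      PetscInt ghosts[1] = {0};            // global index mirrored locally
      Vec      v;
      VecCreateGhost(PETSC_COMM_WORLD,2,PETSC_DECIDE,1,ghosts,&v);
      ...                                  // set and assemble owned values
      VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
      VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
      VecDestroy(&v);
*/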
#undef __FUNCT__
#define __FUNCT__ "VecMPISetGhost"
/*@
   VecMPISetGhost - Sets the ghost points for an MPI ghost vector

   Collective on Vec

   Input Parameters:
+  vv     - the MPI vector
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points, these do not need to be in increasing (sorted) order

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   You must call this AFTER you have set the type of the vector (with VecSetType()) and the size (with VecSetSizes()).

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode  VecMPISetGhost(Vec vv,PetscInt nghost,const PetscInt ghosts[])
{
  PetscErrorCode ierr;
  PetscBool      flg;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)vv,VECMPI,&flg);CHKERRQ(ierr);
  /* if this is already a fully existent VECMPI then basically destroy it and rebuild with ghosting */
  if (flg) {
    PetscInt               n,N;
    Vec_MPI                *w;
    PetscScalar            *larray;
    IS                     from,to;
    ISLocalToGlobalMapping ltog;
    PetscInt               rstart,i,*indices;
    MPI_Comm               comm = ((PetscObject)vv)->comm;

    n    = vv->map->n;
    N    = vv->map->N;
    ierr = (*vv->ops->destroy)(vv);CHKERRQ(ierr);
    ierr = VecSetSizes(vv,n,N);CHKERRQ(ierr);
    ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,nghost,PETSC_NULL);CHKERRQ(ierr);
    w    = (Vec_MPI *)(vv)->data;
    /* Create local representation */
    ierr = VecGetArray(vv,&larray);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(vv,w->localrep);CHKERRQ(ierr);
    ierr = VecRestoreArray(vv,&larray);CHKERRQ(ierr);

    /*
         Create scatter context for scattering (updating) ghost values
    */
    ierr = ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
    ierr = VecScatterCreate(vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(vv,w->localupdate);CHKERRQ(ierr);
    ierr = ISDestroy(&to);CHKERRQ(ierr);
    ierr = ISDestroy(&from);CHKERRQ(ierr);

    /* set local to global mapping for ghosted vector */
    ierr = PetscMalloc((n+nghost)*sizeof(PetscInt),&indices);CHKERRQ(ierr);
    ierr = VecGetOwnershipRange(vv,&rstart,PETSC_NULL);CHKERRQ(ierr);
    for (i=0; i<n; i++) {
      indices[i] = rstart + i;
    }
    for (i=0; i<nghost; i++) {
      indices[n+i] = ghosts[i];
    }
    ierr = ISLocalToGlobalMappingCreate(comm,n+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
    ierr = VecSetLocalToGlobalMapping(vv,ltog);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  } else if (vv->ops->create == VecCreate_MPI) SETERRQ(((PetscObject)vv)->comm,PETSC_ERR_ARG_WRONGSTATE,"Must set local or global size before setting ghosting");
  else if (!((PetscObject)vv)->type_name) SETERRQ(((PetscObject)vv)->comm,PETSC_ERR_ARG_WRONGSTATE,"Must set type to VECMPI before ghosting");
  PetscFunctionReturn(0);
}
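/*
   Usage sketch (illustrative only; sizes and the ghost index are made up):
   adding ghosting to a vector whose type and sizes have already been set,
   per the ordering requirement noted above.  Error checking is elided.

      PetscInt ghosts[1] = {0};
      Vec      v;
      VecCreate(PETSC_COMM_WORLD,&v);
      VecSetSizes(v,2,PETSC_DECIDE);
      VecSetType(v,VECMPI);                // type and sizes first
      VecMPISetGhost(v,1,ghosts);          // then ghosting
      ...
      VecDestroy(&v);
*/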
/* ------------------------------------------------------------------------------------------*/
#undef __FUNCT__
#define __FUNCT__ "VecCreateGhostBlockWithArray"
/*@C
   VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space. Indices in the ghost region are based on blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm   - the MPI communicator to use
.  bs     - block size
.  n      - local vector length
.  N      - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost blocks
.  ghosts - global indices of ghost blocks (or PETSC_NULL if not needed), counts are by block not by index, these do not need to be in increasing (sorted) order
-  array  - the space to store the vector values (as long as n + nghost*bs)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (the total local size, not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

   Level: advanced

   Concepts: vectors^creating ghosted
   Concepts: vectors^creating with array

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostWithArray(), VecCreateGhostBlock()

@*/
PetscErrorCode  VecCreateGhostBlockWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode         ierr;
  Vec_MPI                *w;
  PetscScalar            *larray;
  IS                     from,to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart,i,nb,*indices;

  PetscFunctionBegin;
  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  if (n % bs)                 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local size must be a multiple of block size");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost*bs,array);CHKERRQ(ierr);
  w    = (Vec_MPI *)(*vv)->data;
  /* Create local representation */
  ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,bs,n+bs*nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localrep);CHKERRQ(ierr);
  ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  ierr = ISCreateBlock(comm,bs,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localupdate);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);

  /* set local to global mapping for ghosted vector */
  nb   = n/bs;
  ierr = PetscMalloc((nb+nghost)*sizeof(PetscInt),&indices);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(*vv,&rstart,PETSC_NULL);CHKERRQ(ierr);
  for (i=0; i<nb; i++) {
    indices[i] = rstart + i*bs;
  }
  for (i=0; i<nghost; i++) {
    indices[nb+i] = ghosts[i];
  }
  ierr = ISLocalToGlobalMappingCreate(comm,nb+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
  ierr = VecSetLocalToGlobalMappingBlock(*vv,ltog);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
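/*
   Usage sketch (illustrative only; sizes and the ghost index are made up):
   two local blocks of size 2 plus one ghost block, so the backing array
   must hold n + bs*nghost = 6 entries.  Error checking is elided.

      PetscScalar array[6];
      PetscInt    ghosts[1] = {0};         // global BLOCK index
      Vec         v;
      VecCreateGhostBlockWithArray(PETSC_COMM_WORLD,2,4,PETSC_DECIDE,1,ghosts,array,&v);
      ...
      VecDestroy(&v);                      // does NOT free array
*/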
#undef __FUNCT__
#define __FUNCT__ "VecCreateGhostBlock"
/*@
   VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
   The indexing of the ghost points is done with blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm   - the MPI communicator to use
.  bs     - the block size
.  n      - local vector length
.  N      - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost blocks
-  ghosts - global indices of ghost blocks, counts are by block, not by individual index, these do not need to be in increasing (sorted) order

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (the total local size, not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode  VecCreateGhostBlock(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
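/*
   Usage sketch (illustrative only): the same layout as in the sketch above,
   but with PETSc allocating the storage; ghost blocks are refreshed the same
   way as point ghosts.  Error checking is elided.

      PetscInt ghosts[1] = {0};            // global BLOCK index
      Vec      v;
      VecCreateGhostBlock(PETSC_COMM_WORLD,2,4,PETSC_DECIDE,1,ghosts,&v);
      ...                                  // set and assemble owned values
      VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
      VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
      VecDestroy(&v);
*/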