1 2 /* 3 This file contains routines for Parallel vector operations. 4 */ 5 #include <../src/vec/vec/impls/mpi/pvecimpl.h> /*I "petscvec.h" I*/ 6 7 #undef __FUNCT__ 8 #define __FUNCT__ "VecDot_MPI" 9 static PetscErrorCode VecDot_MPI(Vec xin,Vec yin,PetscScalar *z) 10 { 11 PetscScalar sum,work; 12 PetscErrorCode ierr; 13 14 PetscFunctionBegin; 15 ierr = VecDot_Seq(xin,yin,&work);CHKERRQ(ierr); 16 ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)xin));CHKERRQ(ierr); 17 *z = sum; 18 PetscFunctionReturn(0); 19 } 20 21 #undef __FUNCT__ 22 #define __FUNCT__ "VecTDot_MPI" 23 static PetscErrorCode VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z) 24 { 25 PetscScalar sum,work; 26 PetscErrorCode ierr; 27 28 PetscFunctionBegin; 29 ierr = VecTDot_Seq(xin,yin,&work);CHKERRQ(ierr); 30 ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)xin));CHKERRQ(ierr); 31 *z = sum; 32 PetscFunctionReturn(0); 33 } 34 35 extern PetscErrorCode VecView_MPI_Draw(Vec,PetscViewer); 36 37 #undef __FUNCT__ 38 #define __FUNCT__ "VecPlaceArray_MPI" 39 static PetscErrorCode VecPlaceArray_MPI(Vec vin,const PetscScalar *a) 40 { 41 PetscErrorCode ierr; 42 Vec_MPI *v = (Vec_MPI*)vin->data; 43 44 PetscFunctionBegin; 45 if (v->unplacedarray) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"VecPlaceArray() was already called on this vector, without a call to VecResetArray()"); 46 v->unplacedarray = v->array; /* save previous array so reset can bring it back */ 47 v->array = (PetscScalar*)a; 48 if (v->localrep) { 49 ierr = VecPlaceArray(v->localrep,a);CHKERRQ(ierr); 50 } 51 PetscFunctionReturn(0); 52 } 53 54 extern PetscErrorCode VecGetValues_MPI(Vec,PetscInt,const PetscInt[],PetscScalar[]); 55 56 #undef __FUNCT__ 57 #define __FUNCT__ "VecDuplicate_MPI" 58 static PetscErrorCode VecDuplicate_MPI(Vec win,Vec *v) 59 { 60 PetscErrorCode ierr; 61 Vec_MPI *vw,*w = (Vec_MPI*)win->data; 62 PetscScalar *array; 63 64 PetscFunctionBegin; 65 ierr = VecCreate(PetscObjectComm((PetscObject)win),v);CHKERRQ(ierr); 66 ierr = PetscLayoutReference(win->map,&(*v)->map);CHKERRQ(ierr); 67 68 ierr = VecCreate_MPI_Private(*v,PETSC_TRUE,w->nghost,0);CHKERRQ(ierr); 69 vw = (Vec_MPI*)(*v)->data; 70 ierr = PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps));CHKERRQ(ierr); 71 72 /* save local representation of the parallel vector (and scatter) if it exists */ 73 if (w->localrep) { 74 ierr = VecGetArray(*v,&array);CHKERRQ(ierr); 75 ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,PetscAbs(win->map->bs),win->map->n+w->nghost,array,&vw->localrep);CHKERRQ(ierr); 76 ierr = PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps));CHKERRQ(ierr); 77 ierr = VecRestoreArray(*v,&array);CHKERRQ(ierr); 78 ierr = PetscLogObjectParent((PetscObject)*v,(PetscObject)vw->localrep);CHKERRQ(ierr); 79 80 vw->localupdate = w->localupdate; 81 if (vw->localupdate) { 82 ierr = PetscObjectReference((PetscObject)vw->localupdate);CHKERRQ(ierr); 83 } 84 } 85 86 /* New vector should inherit stashing property of parent */ 87 (*v)->stash.donotstash = win->stash.donotstash; 88 (*v)->stash.ignorenegidx = win->stash.ignorenegidx; 89 90 ierr = PetscObjectListDuplicate(((PetscObject)win)->olist,&((PetscObject)(*v))->olist);CHKERRQ(ierr); 91 ierr = PetscFunctionListDuplicate(((PetscObject)win)->qlist,&((PetscObject)(*v))->qlist);CHKERRQ(ierr); 92 93 (*v)->map->bs = PetscAbs(win->map->bs); 94 (*v)->bstash.bs = win->bstash.bs; 95 PetscFunctionReturn(0); 96 } 97 98 extern PetscErrorCode VecSetOption_MPI(Vec,VecOption,PetscBool); 99 extern PetscErrorCode VecResetArray_MPI(Vec); 100 101 static struct _VecOps DvOps = { VecDuplicate_MPI, /* 1 */ 102 VecDuplicateVecs_Default, 103 VecDestroyVecs_Default, 104 VecDot_MPI, 105 VecMDot_MPI, 106 VecNorm_MPI, 107 VecTDot_MPI, 108 VecMTDot_MPI, 109 VecScale_Seq, 110 VecCopy_Seq, /* 10 */ 111 VecSet_Seq, 112 VecSwap_Seq, 113 VecAXPY_Seq, 114 VecAXPBY_Seq, 115 VecMAXPY_Seq, 116 VecAYPX_Seq, 117 VecWAXPY_Seq, 118 VecAXPBYPCZ_Seq, 119 VecPointwiseMult_Seq, 120 VecPointwiseDivide_Seq, 121 VecSetValues_MPI, /* 20 */ 122 VecAssemblyBegin_MPI, 123 VecAssemblyEnd_MPI, 124 0, 125 VecGetSize_MPI, 126 VecGetSize_Seq, 127 0, 128 VecMax_MPI, 129 VecMin_MPI, 130 VecSetRandom_Seq, 131 VecSetOption_MPI, 132 VecSetValuesBlocked_MPI, 133 VecDestroy_MPI, 134 VecView_MPI, 135 VecPlaceArray_MPI, 136 VecReplaceArray_Seq, 137 VecDot_Seq, 138 VecTDot_Seq, 139 VecNorm_Seq, 140 VecMDot_Seq, 141 VecMTDot_Seq, 142 VecLoad_Default, 143 VecReciprocal_Default, 144 VecConjugate_Seq, 145 0, 146 0, 147 VecResetArray_MPI, 148 0, 149 VecMaxPointwiseDivide_Seq, 150 VecPointwiseMax_Seq, 151 VecPointwiseMaxAbs_Seq, 152 VecPointwiseMin_Seq, 153 VecGetValues_MPI, 154 0, 155 0, 156 0, 157 0, 158 0, 159 0, 160 VecStrideGather_Default, 161 VecStrideScatter_Default, 162 0, 163 0, 164 0, 165 0, 166 0, 167 VecStrideSubSetGather_Default, 168 VecStrideSubSetScatter_Default 169 }; 170 171 #undef __FUNCT__ 172 #define __FUNCT__ "VecCreate_MPI_Private" 173 /* 174 VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()), 175 VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(), 176 VecDuplicate_MPI(), VecCreateGhostWithArray(), VecDuplicate_MPI(), and VecDuplicate_Shared() 177 178 If alloc is true and array is NULL then this routine allocates the space, otherwise 179 no space is allocated. 180 */ 181 PetscErrorCode VecCreate_MPI_Private(Vec v,PetscBool alloc,PetscInt nghost,const PetscScalar array[]) 182 { 183 Vec_MPI *s; 184 PetscErrorCode ierr; 185 186 PetscFunctionBegin; 187 ierr = PetscNewLog(v,&s);CHKERRQ(ierr); 188 v->data = (void*)s; 189 ierr = PetscMemcpy(v->ops,&DvOps,sizeof(DvOps));CHKERRQ(ierr); 190 s->nghost = nghost; 191 v->petscnative = PETSC_TRUE; 192 193 ierr = PetscLayoutSetUp(v->map);CHKERRQ(ierr); 194 195 s->array = (PetscScalar*)array; 196 s->array_allocated = 0; 197 if (alloc && !array) { 198 PetscInt n = v->map->n+nghost; 199 ierr = PetscMalloc1(n,&s->array);CHKERRQ(ierr); 200 ierr = PetscLogObjectMemory((PetscObject)v,n*sizeof(PetscScalar));CHKERRQ(ierr); 201 ierr = PetscMemzero(s->array,v->map->n*sizeof(PetscScalar));CHKERRQ(ierr); 202 s->array_allocated = s->array; 203 } 204 205 /* By default parallel vectors do not have local representation */ 206 s->localrep = 0; 207 s->localupdate = 0; 208 209 v->stash.insertmode = NOT_SET_VALUES; 210 /* create the stashes. The block-size for bstash is set later when 211 VecSetValuesBlocked is called. 212 */ 213 ierr = VecStashCreate_Private(PetscObjectComm((PetscObject)v),1,&v->stash);CHKERRQ(ierr); 214 ierr = VecStashCreate_Private(PetscObjectComm((PetscObject)v),PetscAbs(v->map->bs),&v->bstash);CHKERRQ(ierr); 215 216 #if defined(PETSC_HAVE_MATLAB_ENGINE) 217 ierr = PetscObjectComposeFunction((PetscObject)v,"PetscMatlabEnginePut_C",VecMatlabEnginePut_Default);CHKERRQ(ierr); 218 ierr = PetscObjectComposeFunction((PetscObject)v,"PetscMatlabEngineGet_C",VecMatlabEngineGet_Default);CHKERRQ(ierr); 219 #endif 220 ierr = PetscObjectChangeTypeName((PetscObject)v,VECMPI);CHKERRQ(ierr); 221 PetscFunctionReturn(0); 222 } 223 224 /*MC 225 VECMPI - VECMPI = "mpi" - The basic parallel vector 226 227 Options Database Keys: 228 . -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions() 229 230 Level: beginner 231 232 .seealso: VecCreate(), VecSetType(), VecSetFromOptions(), VecCreateMpiWithArray(), VECMPI, VecType, VecCreateMPI(), VecCreateMpi() 233 M*/ 234 235 #undef __FUNCT__ 236 #define __FUNCT__ "VecCreate_MPI" 237 PETSC_EXTERN PetscErrorCode VecCreate_MPI(Vec vv) 238 { 239 PetscErrorCode ierr; 240 241 PetscFunctionBegin; 242 ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,0,0);CHKERRQ(ierr); 243 PetscFunctionReturn(0); 244 } 245 246 /*MC 247 VECSTANDARD = "standard" - A VECSEQ on one process and VECMPI on more than one process 248 249 Options Database Keys: 250 . -vec_type standard - sets a vector type to standard on calls to VecSetFromOptions() 251 252 Level: beginner 253 254 .seealso: VecCreateSeq(), VecCreateMPI() 255 M*/ 256 257 #undef __FUNCT__ 258 #define __FUNCT__ "VecCreate_Standard" 259 PETSC_EXTERN PetscErrorCode VecCreate_Standard(Vec v) 260 { 261 PetscErrorCode ierr; 262 PetscMPIInt size; 263 264 PetscFunctionBegin; 265 ierr = MPI_Comm_size(PetscObjectComm((PetscObject)v),&size);CHKERRQ(ierr); 266 if (size == 1) { 267 ierr = VecSetType(v,VECSEQ);CHKERRQ(ierr); 268 } else { 269 ierr = VecSetType(v,VECMPI);CHKERRQ(ierr); 270 } 271 PetscFunctionReturn(0); 272 } 273 274 #undef __FUNCT__ 275 #define __FUNCT__ "VecCreateMPIWithArray" 276 /*@C 277 VecCreateMPIWithArray - Creates a parallel, array-style vector, 278 where the user provides the array space to store the vector values. 279 280 Collective on MPI_Comm 281 282 Input Parameters: 283 + comm - the MPI communicator to use 284 . bs - block size, same meaning as VecSetBlockSize() 285 . n - local vector length, cannot be PETSC_DECIDE 286 . N - global vector length (or PETSC_DECIDE to have calculated) 287 - array - the user provided array to store the vector values 288 289 Output Parameter: 290 . vv - the vector 291 292 Notes: 293 Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the 294 same type as an existing vector. 295 296 If the user-provided array is NULL, then VecPlaceArray() can be used 297 at a later stage to SET the array for storing the vector values. 298 299 PETSc does NOT free the array when the vector is destroyed via VecDestroy(). 300 The user should not free the array until the vector is destroyed. 301 302 Level: intermediate 303 304 Concepts: vectors^creating with array 305 306 .seealso: VecCreateSeqWithArray(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(), 307 VecCreateMPI(), VecCreateGhostWithArray(), VecPlaceArray() 308 309 @*/ 310 PetscErrorCode VecCreateMPIWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,const PetscScalar array[],Vec *vv) 311 { 312 PetscErrorCode ierr; 313 314 PetscFunctionBegin; 315 if (n == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector"); 316 ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr); 317 ierr = VecCreate(comm,vv);CHKERRQ(ierr); 318 ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr); 319 ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr); 320 ierr = VecCreate_MPI_Private(*vv,PETSC_FALSE,0,array);CHKERRQ(ierr); 321 PetscFunctionReturn(0); 322 } 323 324 #undef __FUNCT__ 325 #define __FUNCT__ "VecCreateGhostWithArray" 326 /*@C 327 VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor; 328 the caller allocates the array space. 329 330 Collective on MPI_Comm 331 332 Input Parameters: 333 + comm - the MPI communicator to use 334 . n - local vector length 335 . N - global vector length (or PETSC_DECIDE to have calculated if n is given) 336 . nghost - number of local ghost points 337 . ghosts - global indices of ghost points (or NULL if not needed), these do not need to be in increasing order (sorted) 338 - array - the space to store the vector values (as long as n + nghost) 339 340 Output Parameter: 341 . vv - the global vector representation (without ghost points as part of vector) 342 343 Notes: 344 Use VecGhostGetLocalForm() to access the local, ghosted representation 345 of the vector. 346 347 This also automatically sets the ISLocalToGlobalMapping() for this vector. 348 349 Level: advanced 350 351 Concepts: vectors^creating with array 352 Concepts: vectors^ghosted 353 354 .seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), 355 VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(), 356 VecCreateGhostBlock(), VecCreateGhostBlockWithArray(), VecMPISetGhost() 357 358 @*/ 359 PetscErrorCode VecCreateGhostWithArray(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv) 360 { 361 PetscErrorCode ierr; 362 Vec_MPI *w; 363 PetscScalar *larray; 364 IS from,to; 365 ISLocalToGlobalMapping ltog; 366 PetscInt rstart,i,*indices; 367 368 PetscFunctionBegin; 369 *vv = 0; 370 371 if (n == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size"); 372 if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size"); 373 if (nghost < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0"); 374 ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr); 375 /* Create global representation */ 376 ierr = VecCreate(comm,vv);CHKERRQ(ierr); 377 ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr); 378 ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost,array);CHKERRQ(ierr); 379 w = (Vec_MPI*)(*vv)->data; 380 /* Create local representation */ 381 ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr); 382 ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,n+nghost,larray,&w->localrep);CHKERRQ(ierr); 383 ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localrep);CHKERRQ(ierr); 384 ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr); 385 386 /* 387 Create scatter context for scattering (updating) ghost values 388 */ 389 ierr = ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr); 390 ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr); 391 ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr); 392 ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localupdate);CHKERRQ(ierr); 393 ierr = ISDestroy(&to);CHKERRQ(ierr); 394 ierr = ISDestroy(&from);CHKERRQ(ierr); 395 396 /* set local to global mapping for ghosted vector */ 397 ierr = PetscMalloc1((n+nghost),&indices);CHKERRQ(ierr); 398 ierr = VecGetOwnershipRange(*vv,&rstart,NULL);CHKERRQ(ierr); 399 for (i=0; i<n; i++) { 400 indices[i] = rstart + i; 401 } 402 for (i=0; i<nghost; i++) { 403 indices[n+i] = ghosts[i]; 404 } 405 ierr = ISLocalToGlobalMappingCreate(comm,1,n+nghost,indices,PETSC_OWN_POINTER,<og);CHKERRQ(ierr); 406 ierr = VecSetLocalToGlobalMapping(*vv,ltog);CHKERRQ(ierr); 407 ierr = ISLocalToGlobalMappingDestroy(<og);CHKERRQ(ierr); 408 PetscFunctionReturn(0); 409 } 410 411 #undef __FUNCT__ 412 #define __FUNCT__ "VecCreateGhost" 413 /*@ 414 VecCreateGhost - Creates a parallel vector with ghost padding on each processor. 415 416 Collective on MPI_Comm 417 418 Input Parameters: 419 + comm - the MPI communicator to use 420 . n - local vector length 421 . N - global vector length (or PETSC_DECIDE to have calculated if n is given) 422 . nghost - number of local ghost points 423 - ghosts - global indices of ghost points, these do not need to be in increasing order (sorted) 424 425 Output Parameter: 426 . vv - the global vector representation (without ghost points as part of vector) 427 428 Notes: 429 Use VecGhostGetLocalForm() to access the local, ghosted representation 430 of the vector. 431 432 This also automatically sets the ISLocalToGlobalMapping() for this vector. 433 434 Level: advanced 435 436 Concepts: vectors^ghosted 437 438 .seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(), 439 VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(), 440 VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(), 441 VecCreateGhostBlock(), VecCreateGhostBlockWithArray(), VecMPISetGhost() 442 443 @*/ 444 PetscErrorCode VecCreateGhost(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv) 445 { 446 PetscErrorCode ierr; 447 448 PetscFunctionBegin; 449 ierr = VecCreateGhostWithArray(comm,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr); 450 PetscFunctionReturn(0); 451 } 452 453 #undef __FUNCT__ 454 #define __FUNCT__ "VecMPISetGhost" 455 /*@ 456 VecMPISetGhost - Sets the ghost points for an MPI ghost vector 457 458 Collective on Vec 459 460 Input Parameters: 461 + vv - the MPI vector 462 . nghost - number of local ghost points 463 - ghosts - global indices of ghost points, these do not need to be in increasing order (sorted) 464 465 466 Notes: 467 Use VecGhostGetLocalForm() to access the local, ghosted representation 468 of the vector. 469 470 This also automatically sets the ISLocalToGlobalMapping() for this vector. 471 472 You must call this AFTER you have set the type of the vector (with VecSetType()) and the size (with VecSetSizes()). 473 474 Level: advanced 475 476 Concepts: vectors^ghosted 477 478 .seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(), 479 VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(), 480 VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(), 481 VecCreateGhostBlock(), VecCreateGhostBlockWithArray() 482 483 @*/ 484 PetscErrorCode VecMPISetGhost(Vec vv,PetscInt nghost,const PetscInt ghosts[]) 485 { 486 PetscErrorCode ierr; 487 PetscBool flg; 488 489 PetscFunctionBegin; 490 ierr = PetscObjectTypeCompare((PetscObject)vv,VECMPI,&flg);CHKERRQ(ierr); 491 /* if already fully existant VECMPI then basically destroy it and rebuild with ghosting */ 492 if (flg) { 493 PetscInt n,N; 494 Vec_MPI *w; 495 PetscScalar *larray; 496 IS from,to; 497 ISLocalToGlobalMapping ltog; 498 PetscInt rstart,i,*indices; 499 MPI_Comm comm; 500 501 ierr = PetscObjectGetComm((PetscObject)vv,&comm);CHKERRQ(ierr); 502 n = vv->map->n; 503 N = vv->map->N; 504 ierr = (*vv->ops->destroy)(vv);CHKERRQ(ierr); 505 ierr = VecSetSizes(vv,n,N);CHKERRQ(ierr); 506 ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,nghost,NULL);CHKERRQ(ierr); 507 w = (Vec_MPI*)(vv)->data; 508 /* Create local representation */ 509 ierr = VecGetArray(vv,&larray);CHKERRQ(ierr); 510 ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,n+nghost,larray,&w->localrep);CHKERRQ(ierr); 511 ierr = PetscLogObjectParent((PetscObject)vv,(PetscObject)w->localrep);CHKERRQ(ierr); 512 ierr = VecRestoreArray(vv,&larray);CHKERRQ(ierr); 513 514 /* 515 Create scatter context for scattering (updating) ghost values 516 */ 517 ierr = ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr); 518 ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr); 519 ierr = VecScatterCreate(vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr); 520 ierr = PetscLogObjectParent((PetscObject)vv,(PetscObject)w->localupdate);CHKERRQ(ierr); 521 ierr = ISDestroy(&to);CHKERRQ(ierr); 522 ierr = ISDestroy(&from);CHKERRQ(ierr); 523 524 /* set local to global mapping for ghosted vector */ 525 ierr = PetscMalloc1((n+nghost),&indices);CHKERRQ(ierr); 526 ierr = VecGetOwnershipRange(vv,&rstart,NULL);CHKERRQ(ierr); 527 528 for (i=0; i<n; i++) indices[i] = rstart + i; 529 for (i=0; i<nghost; i++) indices[n+i] = ghosts[i]; 530 531 ierr = ISLocalToGlobalMappingCreate(comm,1,n+nghost,indices,PETSC_OWN_POINTER,<og);CHKERRQ(ierr); 532 ierr = VecSetLocalToGlobalMapping(vv,ltog);CHKERRQ(ierr); 533 ierr = ISLocalToGlobalMappingDestroy(<og);CHKERRQ(ierr); 534 } else if (vv->ops->create == VecCreate_MPI) SETERRQ(PetscObjectComm((PetscObject)vv),PETSC_ERR_ARG_WRONGSTATE,"Must set local or global size before setting ghosting"); 535 else if (!((PetscObject)vv)->type_name) SETERRQ(PetscObjectComm((PetscObject)vv),PETSC_ERR_ARG_WRONGSTATE,"Must set type to VECMPI before ghosting"); 536 PetscFunctionReturn(0); 537 } 538 539 540 /* ------------------------------------------------------------------------------------------*/ 541 #undef __FUNCT__ 542 #define __FUNCT__ "VecCreateGhostBlockWithArray" 543 /*@C 544 VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor; 545 the caller allocates the array space. Indices in the ghost region are based on blocks. 546 547 Collective on MPI_Comm 548 549 Input Parameters: 550 + comm - the MPI communicator to use 551 . bs - block size 552 . n - local vector length 553 . N - global vector length (or PETSC_DECIDE to have calculated if n is given) 554 . nghost - number of local ghost blocks 555 . ghosts - global indices of ghost blocks (or NULL if not needed), counts are by block not by index, these do not need to be in increasing order (sorted) 556 - array - the space to store the vector values (as long as n + nghost*bs) 557 558 Output Parameter: 559 . vv - the global vector representation (without ghost points as part of vector) 560 561 Notes: 562 Use VecGhostGetLocalForm() to access the local, ghosted representation 563 of the vector. 564 565 n is the local vector size (total local size not the number of blocks) while nghost 566 is the number of blocks in the ghost portion, i.e. the number of elements in the ghost 567 portion is bs*nghost 568 569 Level: advanced 570 571 Concepts: vectors^creating ghosted 572 Concepts: vectors^creating with array 573 574 .seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), 575 VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(), 576 VecCreateGhostWithArray(), VecCreateGhostBlock() 577 578 @*/ 579 PetscErrorCode VecCreateGhostBlockWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv) 580 { 581 PetscErrorCode ierr; 582 Vec_MPI *w; 583 PetscScalar *larray; 584 IS from,to; 585 ISLocalToGlobalMapping ltog; 586 PetscInt rstart,i,nb,*indices; 587 588 PetscFunctionBegin; 589 *vv = 0; 590 591 if (n == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size"); 592 if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size"); 593 if (nghost < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0"); 594 if (n % bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local size must be a multiple of block size"); 595 ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr); 596 /* Create global representation */ 597 ierr = VecCreate(comm,vv);CHKERRQ(ierr); 598 ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr); 599 ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr); 600 ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost*bs,array);CHKERRQ(ierr); 601 w = (Vec_MPI*)(*vv)->data; 602 /* Create local representation */ 603 ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr); 604 ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,bs,n+bs*nghost,larray,&w->localrep);CHKERRQ(ierr); 605 ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localrep);CHKERRQ(ierr); 606 ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr); 607 608 /* 609 Create scatter context for scattering (updating) ghost values 610 */ 611 ierr = ISCreateBlock(comm,bs,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr); 612 ierr = ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);CHKERRQ(ierr); 613 ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr); 614 ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localupdate);CHKERRQ(ierr); 615 ierr = ISDestroy(&to);CHKERRQ(ierr); 616 ierr = ISDestroy(&from);CHKERRQ(ierr); 617 618 /* set local to global mapping for ghosted vector */ 619 nb = n/bs; 620 ierr = PetscMalloc1((nb+nghost),&indices);CHKERRQ(ierr); 621 ierr = VecGetOwnershipRange(*vv,&rstart,NULL);CHKERRQ(ierr); 622 623 for (i=0; i<nb; i++) indices[i] = rstart + i; 624 for (i=0; i<nghost; i++) indices[nb+i] = ghosts[i]; 625 626 ierr = ISLocalToGlobalMappingCreate(comm,bs,nb+nghost,indices,PETSC_OWN_POINTER,<og);CHKERRQ(ierr); 627 ierr = VecSetLocalToGlobalMapping(*vv,ltog);CHKERRQ(ierr); 628 ierr = ISLocalToGlobalMappingDestroy(<og);CHKERRQ(ierr); 629 PetscFunctionReturn(0); 630 } 631 632 #undef __FUNCT__ 633 #define __FUNCT__ "VecCreateGhostBlock" 634 /*@ 635 VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor. 636 The indicing of the ghost points is done with blocks. 637 638 Collective on MPI_Comm 639 640 Input Parameters: 641 + comm - the MPI communicator to use 642 . bs - the block size 643 . n - local vector length 644 . N - global vector length (or PETSC_DECIDE to have calculated if n is given) 645 . nghost - number of local ghost blocks 646 - ghosts - global indices of ghost blocks, counts are by block, not by individual index, these do not need to be in increasing order (sorted) 647 648 Output Parameter: 649 . vv - the global vector representation (without ghost points as part of vector) 650 651 Notes: 652 Use VecGhostGetLocalForm() to access the local, ghosted representation 653 of the vector. 654 655 n is the local vector size (total local size not the number of blocks) while nghost 656 is the number of blocks in the ghost portion, i.e. the number of elements in the ghost 657 portion is bs*nghost 658 659 Level: advanced 660 661 Concepts: vectors^ghosted 662 663 .seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(), 664 VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), 665 VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecCreateGhostBlockWithArray() 666 667 @*/ 668 PetscErrorCode VecCreateGhostBlock(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv) 669 { 670 PetscErrorCode ierr; 671 672 PetscFunctionBegin; 673 ierr = VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr); 674 PetscFunctionReturn(0); 675 } 676