/*
   This file contains routines for Parallel vector operations.
 */
#include <../src/vec/vec/impls/mpi/pvecimpl.h>   /*I  "petscvec.h"   I*/

#undef __FUNCT__
#define __FUNCT__ "VecDot_MPI"
static PetscErrorCode VecDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecDot_Seq(xin,yin,&work);CHKERRQ(ierr);
  ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)xin));CHKERRQ(ierr);
  *z   = sum;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecTDot_MPI"
static PetscErrorCode VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecTDot_Seq(xin,yin,&work);CHKERRQ(ierr);
  ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)xin));CHKERRQ(ierr);
  *z   = sum;
  PetscFunctionReturn(0);
}

extern PetscErrorCode VecView_MPI_Draw(Vec,PetscViewer);

#undef __FUNCT__
#define __FUNCT__ "VecPlaceArray_MPI"
static PetscErrorCode VecPlaceArray_MPI(Vec vin,const PetscScalar *a)
{
  PetscErrorCode ierr;
  Vec_MPI        *v = (Vec_MPI*)vin->data;

  PetscFunctionBegin;
  if (v->unplacedarray) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
  v->unplacedarray = v->array;  /* save previous array so reset can bring it back */
  v->array         = (PetscScalar*)a;
  if (v->localrep) {
    ierr = VecPlaceArray(v->localrep,a);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

extern PetscErrorCode VecGetValues_MPI(Vec,PetscInt,const PetscInt[],PetscScalar[]);

#undef __FUNCT__
#define __FUNCT__ "VecDuplicate_MPI"
static PetscErrorCode VecDuplicate_MPI(Vec win,Vec *v)
{
  PetscErrorCode ierr;
  Vec_MPI        *vw,*w = (Vec_MPI*)win->data;
  PetscScalar    *array;

  PetscFunctionBegin;
  ierr = VecCreate(PetscObjectComm((PetscObject)win),v);CHKERRQ(ierr);
  ierr = PetscLayoutReference(win->map,&(*v)->map);CHKERRQ(ierr);

  ierr = VecCreate_MPI_Private(*v,PETSC_TRUE,w->nghost,0);CHKERRQ(ierr);
  vw   = (Vec_MPI*)(*v)->data;
  ierr = PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps));CHKERRQ(ierr);

  /* save local representation of the parallel vector (and scatter) if it exists */
  if (w->localrep) {
    ierr = VecGetArray(*v,&array);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,PetscAbs(win->map->bs),win->map->n+w->nghost,array,&vw->localrep);CHKERRQ(ierr);
    ierr = PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps));CHKERRQ(ierr);
    ierr = VecRestoreArray(*v,&array);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)*v,(PetscObject)vw->localrep);CHKERRQ(ierr);

    vw->localupdate = w->localupdate;
    if (vw->localupdate) {
      ierr = PetscObjectReference((PetscObject)vw->localupdate);CHKERRQ(ierr);
    }
  }

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash   = win->stash.donotstash;
  (*v)->stash.ignorenegidx = win->stash.ignorenegidx;

  ierr = PetscObjectListDuplicate(((PetscObject)win)->olist,&((PetscObject)(*v))->olist);CHKERRQ(ierr);
  ierr = PetscFunctionListDuplicate(((PetscObject)win)->qlist,&((PetscObject)(*v))->qlist);CHKERRQ(ierr);

  (*v)->map->bs   = PetscAbs(win->map->bs);
  (*v)->bstash.bs = win->bstash.bs;
  PetscFunctionReturn(0);
}

extern PetscErrorCode VecSetOption_MPI(Vec,VecOption,PetscBool);
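/*
   Usage sketch (illustrative comment only, not compiled): VecDot_MPI() and
   VecTDot_MPI() above are reached through the type-independent interface.
   Here x and y stand for two assembled parallel vectors on the same
   communicator; each process computes its partial dot product with the
   sequential kernel and the partials are combined with one MPI_Allreduce().

     PetscScalar    dot;
     PetscErrorCode ierr;
     ierr = VecDot(x,y,&dot);CHKERRQ(ierr);    for VECMPI this dispatches to VecDot_MPI()
*/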
extern PetscErrorCode VecResetArray_MPI(Vec);

static struct _VecOps DvOps = { VecDuplicate_MPI, /* 1 */
                                VecDuplicateVecs_Default,
                                VecDestroyVecs_Default,
                                VecDot_MPI,
                                VecMDot_MPI,
                                VecNorm_MPI,
                                VecTDot_MPI,
                                VecMTDot_MPI,
                                VecScale_Seq,
                                VecCopy_Seq, /* 10 */
                                VecSet_Seq,
                                VecSwap_Seq,
                                VecAXPY_Seq,
                                VecAXPBY_Seq,
                                VecMAXPY_Seq,
                                VecAYPX_Seq,
                                VecWAXPY_Seq,
                                VecAXPBYPCZ_Seq,
                                VecPointwiseMult_Seq,
                                VecPointwiseDivide_Seq,
                                VecSetValues_MPI, /* 20 */
                                VecAssemblyBegin_MPI,
                                VecAssemblyEnd_MPI,
                                0,
                                VecGetSize_MPI,
                                VecGetSize_Seq,
                                0,
                                VecMax_MPI,
                                VecMin_MPI,
                                VecSetRandom_Seq,
                                VecSetOption_MPI,
                                VecSetValuesBlocked_MPI,
                                VecDestroy_MPI,
                                VecView_MPI,
                                VecPlaceArray_MPI,
                                VecReplaceArray_Seq,
                                VecDot_Seq,
                                VecTDot_Seq,
                                VecNorm_Seq,
                                VecMDot_Seq,
                                VecMTDot_Seq,
                                VecLoad_Default,
                                VecReciprocal_Default,
                                VecConjugate_Seq,
                                0,
                                0,
                                VecResetArray_MPI,
                                0,
                                VecMaxPointwiseDivide_Seq,
                                VecPointwiseMax_Seq,
                                VecPointwiseMaxAbs_Seq,
                                VecPointwiseMin_Seq,
                                VecGetValues_MPI,
                                0,
                                0,
                                0,
                                0,
                                0,
                                0,
                                VecStrideGather_Default,
                                VecStrideScatter_Default,
                                0,
                                0,
                                0,
                                0,
                                0,
                                VecStrideSubSetGather_Default,
                                VecStrideSubSetScatter_Default,
                                0,
                                0
};
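/*
   How the table above is used (sketch): every Vec carries a copy of a
   _VecOps table in v->ops, and the type-independent front ends simply
   indirect through it.  Conceptually, for a VECMPI vector x,

     VecDot(x,y,&z)   ends up calling   (*x->ops->dot)(x,y,&z),   i.e. VecDot_MPI(),

   while purely local operations such as VecScale() fall through to the
   sequential kernels (VecScale_Seq, etc.) listed in the same table.  The
   zero entries mark operations this type does not implement.
*/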
#undef __FUNCT__
#define __FUNCT__ "VecCreate_MPI_Private"
/*
   VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
   VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
   VecCreateGhostWithArray(), VecDuplicate_MPI(), and VecDuplicate_Shared()

   If alloc is true and array is NULL then this routine allocates the space, otherwise
   no space is allocated.
*/
PetscErrorCode VecCreate_MPI_Private(Vec v,PetscBool alloc,PetscInt nghost,const PetscScalar array[])
{
  Vec_MPI        *s;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr           = PetscNewLog(v,&s);CHKERRQ(ierr);
  v->data        = (void*)s;
  ierr           = PetscMemcpy(v->ops,&DvOps,sizeof(DvOps));CHKERRQ(ierr);
  s->nghost      = nghost;
  v->petscnative = PETSC_TRUE;

  ierr = PetscLayoutSetUp(v->map);CHKERRQ(ierr);

  s->array           = (PetscScalar*)array;
  s->array_allocated = 0;
  if (alloc && !array) {
    PetscInt n = v->map->n+nghost;
    ierr               = PetscMalloc1(n,&s->array);CHKERRQ(ierr);
    ierr               = PetscLogObjectMemory((PetscObject)v,n*sizeof(PetscScalar));CHKERRQ(ierr);
    ierr               = PetscMemzero(s->array,v->map->n*sizeof(PetscScalar));CHKERRQ(ierr);
    s->array_allocated = s->array;
  }

  /* By default parallel vectors do not have local representation */
  s->localrep    = 0;
  s->localupdate = 0;

  v->stash.insertmode = NOT_SET_VALUES;
  /* create the stashes. The block-size for bstash is set later when
     VecSetValuesBlocked is called.
  */
  ierr = VecStashCreate_Private(PetscObjectComm((PetscObject)v),1,&v->stash);CHKERRQ(ierr);
  ierr = VecStashCreate_Private(PetscObjectComm((PetscObject)v),PetscAbs(v->map->bs),&v->bstash);CHKERRQ(ierr);

#if defined(PETSC_HAVE_MATLAB_ENGINE)
  ierr = PetscObjectComposeFunction((PetscObject)v,"PetscMatlabEnginePut_C",VecMatlabEnginePut_Default);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)v,"PetscMatlabEngineGet_C",VecMatlabEngineGet_Default);CHKERRQ(ierr);
#endif
  ierr = PetscObjectChangeTypeName((PetscObject)v,VECMPI);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*MC
   VECMPI - VECMPI = "mpi" - The basic parallel vector

   Options Database Keys:
.  -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions()

   Level: beginner

.seealso: VecCreate(), VecSetType(), VecSetFromOptions(), VecCreateMPIWithArray(), VecType, VecCreateMPI()
M*/

#undef __FUNCT__
#define __FUNCT__ "VecCreate_MPI"
PETSC_EXTERN PetscErrorCode VecCreate_MPI(Vec vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*MC
   VECSTANDARD = "standard" - A VECSEQ on one process and VECMPI on more than one process

   Options Database Keys:
.  -vec_type standard - sets the vector type to VECSTANDARD during a call to VecSetFromOptions()

   Level: beginner

.seealso: VecCreateSeq(), VecCreateMPI()
M*/

#undef __FUNCT__
#define __FUNCT__ "VecCreate_Standard"
PETSC_EXTERN PetscErrorCode VecCreate_Standard(Vec v)
{
  PetscErrorCode ierr;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)v),&size);CHKERRQ(ierr);
  if (size == 1) {
    ierr = VecSetType(v,VECSEQ);CHKERRQ(ierr);
  } else {
    ierr = VecSetType(v,VECMPI);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
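/*
   Usage sketch (illustrative comment only): how a user reaches the creation
   routines above.  VecSetType() looks the type name up in the registry and
   invokes VecCreate_Standard() / VecCreate_MPI() accordingly; the global
   size 100 is a placeholder.

     Vec x;
     ierr = VecCreate(PETSC_COMM_WORLD,&x);CHKERRQ(ierr);
     ierr = VecSetSizes(x,PETSC_DECIDE,100);CHKERRQ(ierr);
     ierr = VecSetType(x,VECSTANDARD);CHKERRQ(ierr);    VECSEQ on one rank, VECMPI otherwise
     ierr = VecDestroy(&x);CHKERRQ(ierr);
*/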
#undef __FUNCT__
#define __FUNCT__ "VecCreateMPIWithArray"
/*@C
   VecCreateMPIWithArray - Creates a parallel, array-style vector,
   where the user provides the array space to store the vector values.

   Collective on MPI_Comm

   Input Parameters:
+  comm  - the MPI communicator to use
.  bs    - block size, same meaning as VecSetBlockSize()
.  n     - local vector length, cannot be PETSC_DECIDE
.  N     - global vector length (or PETSC_DECIDE to have it calculated)
-  array - the user provided array to store the vector values

   Output Parameter:
.  vv - the vector

   Notes:
   Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
   same type as an existing vector.

   If the user-provided array is NULL, then VecPlaceArray() can be used
   at a later stage to SET the array for storing the vector values.

   PETSc does NOT free the array when the vector is destroyed via VecDestroy().
   The user should not free the array until the vector is destroyed.

   Level: intermediate

   Concepts: vectors^creating with array

.seealso: VecCreateSeqWithArray(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(),
          VecCreateMPI(), VecCreateGhostWithArray(), VecPlaceArray()

@*/
PetscErrorCode VecCreateMPIWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,const PetscScalar array[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (n == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_FALSE,0,array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
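/*
   Usage sketch (illustrative comment only): creating a vector over
   caller-owned storage.  The local length and buffer are placeholders; the
   buffer must outlive the vector because PETSc never frees it.

     PetscScalar storage[10];
     Vec         x;
     ierr = VecCreateMPIWithArray(PETSC_COMM_WORLD,1,10,PETSC_DECIDE,storage,&x);CHKERRQ(ierr);
     ierr = VecSet(x,1.0);CHKERRQ(ierr);
     ierr = VecDestroy(&x);CHKERRQ(ierr);       storage[] is NOT freed here
*/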
#undef __FUNCT__
#define __FUNCT__ "VecCreateGhostWithArray"
/*@C
   VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space.

   Collective on MPI_Comm

   Input Parameters:
+  comm   - the MPI communicator to use
.  n      - local vector length
.  N      - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost points
.  ghosts - global indices of ghost points (or NULL if not needed), these do not need to be in increasing order (sorted)
-  array  - the space to store the vector values (as long as n + nghost)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   Level: advanced

   Concepts: vectors^creating with array
   Concepts: vectors^ghosted

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray(), VecMPISetGhost()

@*/
PetscErrorCode VecCreateGhostWithArray(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode         ierr;
  Vec_MPI                *w;
  PetscScalar            *larray;
  IS                     from,to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart,i,*indices;

  PetscFunctionBegin;
  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost,array);CHKERRQ(ierr);
  w    = (Vec_MPI*)(*vv)->data;
  /* Create local representation */
  ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localrep);CHKERRQ(ierr);
  ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  ierr = ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localupdate);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);

  /* set local to global mapping for ghosted vector */
  ierr = PetscMalloc1((n+nghost),&indices);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(*vv,&rstart,NULL);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    indices[i] = rstart + i;
  }
  for (i=0; i<nghost; i++) {
    indices[n+i] = ghosts[i];
  }
  ierr = ISLocalToGlobalMappingCreate(comm,1,n+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
  ierr = VecSetLocalToGlobalMapping(*vv,ltog);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
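/*
   Usage sketch (illustrative comment only): a process that owns 4 entries and
   needs 2 ghost entries supplies storage for all 6 of them.  The sizes and
   ghost indices below are made-up placeholders.

     PetscInt    ghosts[2] = {0,9};             global indices owned elsewhere
     PetscScalar storage[6];                    n + nghost = 4 + 2 entries
     Vec         x;
     ierr = VecCreateGhostWithArray(PETSC_COMM_WORLD,4,PETSC_DECIDE,2,ghosts,storage,&x);CHKERRQ(ierr);
     ierr = VecDestroy(&x);CHKERRQ(ierr);
*/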
#undef __FUNCT__
#define __FUNCT__ "VecCreateGhost"
/*@
   VecCreateGhost - Creates a parallel vector with ghost padding on each processor.

   Collective on MPI_Comm

   Input Parameters:
+  comm   - the MPI communicator to use
.  n      - local vector length
.  N      - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points, these do not need to be in increasing order (sorted)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray(), VecMPISetGhost()

@*/
PetscErrorCode VecCreateGhost(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreateGhostWithArray(comm,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
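/*
   Typical ghost-update cycle (illustrative comment only), assuming x was
   built with VecCreateGhost() above.  After the global entries are
   assembled, the ghost slots of the local form are refreshed with a forward
   scatter, which uses exactly the w->localupdate context created in
   VecCreateGhostWithArray():

     Vec xlocal;
     ierr = VecGhostUpdateBegin(x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
     ierr = VecGhostUpdateEnd(x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
     ierr = VecGhostGetLocalForm(x,&xlocal);CHKERRQ(ierr);
          ... work with the n+nghost entries of xlocal ...
     ierr = VecGhostRestoreLocalForm(x,&xlocal);CHKERRQ(ierr);
*/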
#undef __FUNCT__
#define __FUNCT__ "VecMPISetGhost"
/*@
   VecMPISetGhost - Sets the ghost points for an MPI ghost vector

   Collective on Vec

   Input Parameters:
+  vv     - the MPI vector
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points, these do not need to be in increasing order (sorted)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   You must call this AFTER you have set the type of the vector (with VecSetType()) and the size (with VecSetSizes()).

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode VecMPISetGhost(Vec vv,PetscInt nghost,const PetscInt ghosts[])
{
  PetscErrorCode ierr;
  PetscBool      flg;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)vv,VECMPI,&flg);CHKERRQ(ierr);
  /* if this is already a fully built VECMPI then basically destroy it and rebuild with ghosting */
  if (flg) {
    PetscInt               n,N;
    Vec_MPI                *w;
    PetscScalar            *larray;
    IS                     from,to;
    ISLocalToGlobalMapping ltog;
    PetscInt               rstart,i,*indices;
    MPI_Comm               comm;

    ierr = PetscObjectGetComm((PetscObject)vv,&comm);CHKERRQ(ierr);
    n    = vv->map->n;
    N    = vv->map->N;
    ierr = (*vv->ops->destroy)(vv);CHKERRQ(ierr);
    ierr = VecSetSizes(vv,n,N);CHKERRQ(ierr);
    ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,nghost,NULL);CHKERRQ(ierr);
    w    = (Vec_MPI*)(vv)->data;
    /* Create local representation */
    ierr = VecGetArray(vv,&larray);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)vv,(PetscObject)w->localrep);CHKERRQ(ierr);
    ierr = VecRestoreArray(vv,&larray);CHKERRQ(ierr);

    /*
         Create scatter context for scattering (updating) ghost values
    */
    ierr = ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
    ierr = VecScatterCreate(vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)vv,(PetscObject)w->localupdate);CHKERRQ(ierr);
    ierr = ISDestroy(&to);CHKERRQ(ierr);
    ierr = ISDestroy(&from);CHKERRQ(ierr);

    /* set local to global mapping for ghosted vector */
    ierr = PetscMalloc1((n+nghost),&indices);CHKERRQ(ierr);
    ierr = VecGetOwnershipRange(vv,&rstart,NULL);CHKERRQ(ierr);

    for (i=0; i<n; i++)      indices[i]   = rstart + i;
    for (i=0; i<nghost; i++) indices[n+i] = ghosts[i];

    ierr = ISLocalToGlobalMappingCreate(comm,1,n+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
    ierr = VecSetLocalToGlobalMapping(vv,ltog);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  } else if (vv->ops->create == VecCreate_MPI) SETERRQ(PetscObjectComm((PetscObject)vv),PETSC_ERR_ARG_WRONGSTATE,"Must set local or global size before setting ghosting");
  else if (!((PetscObject)vv)->type_name) SETERRQ(PetscObjectComm((PetscObject)vv),PETSC_ERR_ARG_WRONGSTATE,"Must set type to VECMPI before ghosting");
  PetscFunctionReturn(0);
}


/* ------------------------------------------------------------------------------------------*/
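/*
   Usage sketch (illustrative comment only): ghosting an existing VECMPI after
   its sizes and type are set, per the ordering requirement in the man page
   above.  n, N, nghost, and ghosts[] are placeholders.

     Vec x;
     ierr = VecCreate(PETSC_COMM_WORLD,&x);CHKERRQ(ierr);
     ierr = VecSetSizes(x,n,N);CHKERRQ(ierr);
     ierr = VecSetType(x,VECMPI);CHKERRQ(ierr);
     ierr = VecMPISetGhost(x,nghost,ghosts);CHKERRQ(ierr);
*/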
#undef __FUNCT__
#define __FUNCT__ "VecCreateGhostBlockWithArray"
/*@C
   VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space. Indices in the ghost region are based on blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm   - the MPI communicator to use
.  bs     - block size
.  n      - local vector length
.  N      - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost blocks
.  ghosts - global indices of ghost blocks (or NULL if not needed), counts are by block not by index, these do not need to be in increasing order (sorted)
-  array  - the space to store the vector values (as long as n + nghost*bs)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (total local size, not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the
   ghost portion is bs*nghost

   Level: advanced

   Concepts: vectors^creating ghosted
   Concepts: vectors^creating with array

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostWithArray(), VecCreateGhostBlock()

@*/
PetscErrorCode VecCreateGhostBlockWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode         ierr;
  Vec_MPI                *w;
  PetscScalar            *larray;
  IS                     from,to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart,i,nb,*indices;

  PetscFunctionBegin;
  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  if (n % bs)                 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local size must be a multiple of block size");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost*bs,array);CHKERRQ(ierr);
  w    = (Vec_MPI*)(*vv)->data;
  /* Create local representation */
  ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,bs,n+bs*nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localrep);CHKERRQ(ierr);
  ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  ierr = ISCreateBlock(comm,bs,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localupdate);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);

  /* set local to global mapping for ghosted vector */
  nb     = n/bs;
  ierr   = PetscMalloc1((nb+nghost),&indices);CHKERRQ(ierr);
  ierr   = VecGetOwnershipRange(*vv,&rstart,NULL);CHKERRQ(ierr);
  rstart = rstart/bs;  /* indices[] holds block indices, so convert the first owned entry into its block index (fix added in editing; later PETSc releases do the same) */

  for (i=0; i<nb; i++)     indices[i]    = rstart + i;
  for (i=0; i<nghost; i++) indices[nb+i] = ghosts[i];

  ierr = ISLocalToGlobalMappingCreate(comm,bs,nb+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
  ierr = VecSetLocalToGlobalMapping(*vv,ltog);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
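/*
   Usage sketch (illustrative comment only): with bs = 2 and nghost = 3 ghost
   blocks, the caller supplies n + bs*nghost = 10 + 6 scalars, and ghosts[]
   holds 3 block indices (not entry indices).  All sizes are placeholders.

     PetscInt    ghosts[3] = {0,4,7};           global BLOCK indices
     PetscScalar storage[16];                   n = 10 entries plus 6 ghost entries
     Vec         x;
     ierr = VecCreateGhostBlockWithArray(PETSC_COMM_WORLD,2,10,PETSC_DECIDE,3,ghosts,storage,&x);CHKERRQ(ierr);
     ierr = VecDestroy(&x);CHKERRQ(ierr);
*/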
#undef __FUNCT__
#define __FUNCT__ "VecCreateGhostBlock"
/*@
   VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
   The indexing of the ghost points is done with blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm   - the MPI communicator to use
.  bs     - the block size
.  n      - local vector length
.  N      - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost blocks
-  ghosts - global indices of ghost blocks, counts are by block, not by individual index, these do not need to be in increasing order (sorted)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (total local size, not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the
   ghost portion is bs*nghost

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode VecCreateGhostBlock(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
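/*
   Usage sketch (illustrative comment only): same block semantics as
   VecCreateGhostBlockWithArray() above, but PETSc allocates the storage.
   The block size, lengths, and indices are placeholders; n = 12 must be a
   multiple of bs = 3.

     PetscInt ghosts[2] = {1,5};                global BLOCK indices
     Vec      x;
     ierr = VecCreateGhostBlock(PETSC_COMM_WORLD,3,12,PETSC_DECIDE,2,ghosts,&x);CHKERRQ(ierr);
     ierr = VecDestroy(&x);CHKERRQ(ierr);
*/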