/*
   This file contains routines for Parallel vector operations.
 */
#include <../src/vec/vec/impls/mpi/pvecimpl.h>   /*I  "petscvec.h"   I*/

#undef __FUNCT__
#define __FUNCT__ "VecDot_MPI"
PetscErrorCode VecDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecDot_Seq(xin,yin,&work);CHKERRQ(ierr);
  ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,((PetscObject)xin)->comm);CHKERRQ(ierr);
  *z   = sum;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecTDot_MPI"
PetscErrorCode VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecTDot_Seq(xin,yin,&work);CHKERRQ(ierr);
  ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,((PetscObject)xin)->comm);CHKERRQ(ierr);
  *z   = sum;
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
extern PetscErrorCode VecView_MPI_Draw(Vec,PetscViewer);
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "VecPlaceArray_MPI"
PetscErrorCode VecPlaceArray_MPI(Vec vin,const PetscScalar *a)
{
  PetscErrorCode ierr;
  Vec_MPI        *v = (Vec_MPI *)vin->data;

  PetscFunctionBegin;
  if (v->unplacedarray) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
  v->unplacedarray = v->array;  /* save previous array so reset can bring it back */
  v->array         = (PetscScalar *)a;
  if (v->localrep) {
    ierr = VecPlaceArray(v->localrep,a);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

extern PetscErrorCode VecGetValues_MPI(Vec,PetscInt,const PetscInt [],PetscScalar []);

#undef __FUNCT__
#define __FUNCT__ "VecDuplicate_MPI"
static PetscErrorCode VecDuplicate_MPI(Vec win,Vec *v)
{
  PetscErrorCode ierr;
  Vec_MPI        *vw,*w = (Vec_MPI *)win->data;
  PetscScalar    *array;

  PetscFunctionBegin;
  ierr = VecCreate(((PetscObject)win)->comm,v);CHKERRQ(ierr);
  ierr = PetscLayoutReference(win->map,&(*v)->map);CHKERRQ(ierr);

  ierr = VecCreate_MPI_Private(*v,PETSC_TRUE,w->nghost,0);CHKERRQ(ierr);
  vw   = (Vec_MPI *)(*v)->data;
  ierr = PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps));CHKERRQ(ierr);

  /* save local representation of the parallel vector (and scatter) if it exists */
  if (w->localrep) {
    ierr = VecGetArray(*v,&array);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,win->map->bs,win->map->n+w->nghost,array,&vw->localrep);CHKERRQ(ierr);
    ierr = PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps));CHKERRQ(ierr);
    ierr = VecRestoreArray(*v,&array);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(*v,vw->localrep);CHKERRQ(ierr);
    vw->localupdate = w->localupdate;
    if (vw->localupdate) {
      ierr = PetscObjectReference((PetscObject)vw->localupdate);CHKERRQ(ierr);
    }
  }

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash   = win->stash.donotstash;
  (*v)->stash.ignorenegidx = win->stash.ignorenegidx;

  ierr = PetscObjectListDuplicate(((PetscObject)win)->olist,&((PetscObject)(*v))->olist);CHKERRQ(ierr);
  ierr = PetscFunctionListDuplicate(((PetscObject)win)->qlist,&((PetscObject)(*v))->qlist);CHKERRQ(ierr);
  (*v)->map->bs   = win->map->bs;
  (*v)->bstash.bs = win->bstash.bs;

  PetscFunctionReturn(0);
}

extern PetscErrorCode VecSetOption_MPI(Vec,VecOption,PetscBool);
extern PetscErrorCode VecResetArray_MPI(Vec);
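
/*
   Usage sketch (illustrative only, not part of the library): VecDot() on a
   VECMPI vector dispatches to VecDot_MPI() above, which combines the local
   dot products with a single MPI_Allreduce(). Assumes PetscInitialize() has
   been called; error checking abbreviated.

     Vec         x,y;
     PetscScalar dot;

     ierr = VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE,100,&x);CHKERRQ(ierr);
     ierr = VecDuplicate(x,&y);CHKERRQ(ierr);
     ierr = VecSet(x,1.0);CHKERRQ(ierr);
     ierr = VecSet(y,2.0);CHKERRQ(ierr);
     ierr = VecDot(x,y,&dot);CHKERRQ(ierr);   // every process receives the same global value
     ierr = VecDestroy(&x);CHKERRQ(ierr);
     ierr = VecDestroy(&y);CHKERRQ(ierr);
*/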

/* Dispatch table for VECMPI vectors: operations that need no communication reuse the
   Seq kernels directly; a 0 entry means the operation is not provided by this type */
static struct _VecOps DvOps = { VecDuplicate_MPI, /* 1 */
                                VecDuplicateVecs_Default,
                                VecDestroyVecs_Default,
                                VecDot_MPI,
                                VecMDot_MPI,
                                VecNorm_MPI,
                                VecTDot_MPI,
                                VecMTDot_MPI,
                                VecScale_Seq,
                                VecCopy_Seq, /* 10 */
                                VecSet_Seq,
                                VecSwap_Seq,
                                VecAXPY_Seq,
                                VecAXPBY_Seq,
                                VecMAXPY_Seq,
                                VecAYPX_Seq,
                                VecWAXPY_Seq,
                                VecAXPBYPCZ_Seq,
                                VecPointwiseMult_Seq,
                                VecPointwiseDivide_Seq,
                                VecSetValues_MPI, /* 20 */
                                VecAssemblyBegin_MPI,
                                VecAssemblyEnd_MPI,
                                0,
                                VecGetSize_MPI,
                                VecGetSize_Seq,
                                0,
                                VecMax_MPI,
                                VecMin_MPI,
                                VecSetRandom_Seq,
                                VecSetOption_MPI,
                                VecSetValuesBlocked_MPI,
                                VecDestroy_MPI,
                                VecView_MPI,
                                VecPlaceArray_MPI,
                                VecReplaceArray_Seq,
                                VecDot_Seq,
                                VecTDot_Seq,
                                VecNorm_Seq,
                                VecMDot_Seq,
                                VecMTDot_Seq,
                                VecLoad_Default,
                                VecReciprocal_Default,
                                VecConjugate_Seq,
                                0,
                                0,
                                VecResetArray_MPI,
                                0,
                                VecMaxPointwiseDivide_Seq,
                                VecPointwiseMax_Seq,
                                VecPointwiseMaxAbs_Seq,
                                VecPointwiseMin_Seq,
                                VecGetValues_MPI,
                                0,
                                0,
                                0,
                                0,
                                0,
                                0,
                                VecStrideGather_Default,
                                VecStrideScatter_Default
};

#undef __FUNCT__
#define __FUNCT__ "VecCreate_MPI_Private"
/*
    VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
    VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
    VecCreateGhostWithArray(), VecDuplicate_MPI(), and VecDuplicate_Shared()

    If alloc is true and array is PETSC_NULL then this routine allocates the space, otherwise
    no space is allocated.
*/
PetscErrorCode VecCreate_MPI_Private(Vec v,PetscBool alloc,PetscInt nghost,const PetscScalar array[])
{
  Vec_MPI        *s;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr           = PetscNewLog(v,Vec_MPI,&s);CHKERRQ(ierr);
  v->data        = (void*)s;
  ierr           = PetscMemcpy(v->ops,&DvOps,sizeof(DvOps));CHKERRQ(ierr);
  s->nghost      = nghost;
  v->petscnative = PETSC_TRUE;

  ierr = PetscLayoutSetUp(v->map);CHKERRQ(ierr);
  s->array           = (PetscScalar *)array;
  s->array_allocated = 0;
  if (alloc && !array) {
    PetscInt n = v->map->n+nghost;
    ierr = PetscMalloc(n*sizeof(PetscScalar),&s->array);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory(v,n*sizeof(PetscScalar));CHKERRQ(ierr);
    ierr = PetscMemzero(s->array,v->map->n*sizeof(PetscScalar));CHKERRQ(ierr);
    s->array_allocated = s->array;
  }

  /* By default parallel vectors do not have local representation */
  s->localrep    = 0;
  s->localupdate = 0;

  v->stash.insertmode = NOT_SET_VALUES;
  /* create the stashes. The block-size for bstash is set later when
     VecSetValuesBlocked is called.
  */
  ierr = VecStashCreate_Private(((PetscObject)v)->comm,1,&v->stash);CHKERRQ(ierr);
  ierr = VecStashCreate_Private(((PetscObject)v)->comm,v->map->bs,&v->bstash);CHKERRQ(ierr);

#if defined(PETSC_HAVE_MATLAB_ENGINE)
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEnginePut_C","VecMatlabEnginePut_Default",VecMatlabEnginePut_Default);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEngineGet_C","VecMatlabEngineGet_Default",VecMatlabEngineGet_Default);CHKERRQ(ierr);
#endif
  ierr = PetscObjectChangeTypeName((PetscObject)v,VECMPI);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*MC
   VECMPI - VECMPI = "mpi" - The basic parallel vector

   Options Database Keys:
. -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions()

  Level: beginner

.seealso: VecCreate(), VecSetType(), VecSetFromOptions(), VecCreateMPIWithArray(), VECMPI, VecType, VecCreateMPI()
M*/

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "VecCreate_MPI"
PetscErrorCode VecCreate_MPI(Vec vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

/*MC
   VECSTANDARD = "standard" - A VECSEQ on one process and VECMPI on more than one process

   Options Database Keys:
. -vec_type standard - sets the vector type to VECSTANDARD during a call to VecSetFromOptions()

  Level: beginner

.seealso: VecCreateSeq(), VecCreateMPI()
M*/

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "VecCreate_Standard"
PetscErrorCode VecCreate_Standard(Vec v)
{
  PetscErrorCode ierr;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(((PetscObject)v)->comm,&size);CHKERRQ(ierr);
  if (size == 1) {
    ierr = VecSetType(v,VECSEQ);CHKERRQ(ierr);
  } else {
    ierr = VecSetType(v,VECMPI);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
EXTERN_C_END
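
/*
   Usage sketch (illustrative only): selecting these types at runtime.
   VECSTANDARD picks VECSEQ or VECMPI automatically from the communicator
   size; -vec_type mpi forces VECMPI. Error checking abbreviated.

     Vec x;

     ierr = VecCreate(PETSC_COMM_WORLD,&x);CHKERRQ(ierr);
     ierr = VecSetSizes(x,PETSC_DECIDE,100);CHKERRQ(ierr);
     ierr = VecSetFromOptions(x);CHKERRQ(ierr);   // honors -vec_type mpi|standard|...
     ierr = VecDestroy(&x);CHKERRQ(ierr);
*/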

#undef __FUNCT__
#define __FUNCT__ "VecCreateMPIWithArray"
/*@C
   VecCreateMPIWithArray - Creates a parallel, array-style vector,
   where the user provides the array space to store the vector values.

   Collective on MPI_Comm

   Input Parameters:
+  comm  - the MPI communicator to use
.  bs    - block size, same meaning as VecSetBlockSize()
.  n     - local vector length, cannot be PETSC_DECIDE
.  N     - global vector length (or PETSC_DECIDE to have it calculated)
-  array - the user provided array to store the vector values

   Output Parameter:
.  vv - the vector

   Notes:
   Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
   same type as an existing vector.

   If the user-provided array is PETSC_NULL, then VecPlaceArray() can be used
   at a later stage to SET the array for storing the vector values.

   PETSc does NOT free the array when the vector is destroyed via VecDestroy().
   The user should not free the array until the vector is destroyed.

   Level: intermediate

   Concepts: vectors^creating with array

.seealso: VecCreateSeqWithArray(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(),
          VecCreateMPI(), VecCreateGhostWithArray(), VecPlaceArray()

@*/
PetscErrorCode VecCreateMPIWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,const PetscScalar array[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (n == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_FALSE,0,array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
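
/*
   Usage sketch (illustrative only): the caller owns the array and must keep
   it alive until after VecDestroy(). The local size below is made up; error
   checking abbreviated.

     PetscInt    n = 5;
     PetscScalar storage[5];
     Vec         x;

     ierr = VecCreateMPIWithArray(PETSC_COMM_WORLD,1,n,PETSC_DECIDE,storage,&x);CHKERRQ(ierr);
     ierr = VecSet(x,3.0);CHKERRQ(ierr);    // writes into storage[]
     ierr = VecDestroy(&x);CHKERRQ(ierr);   // storage[] is NOT freed by PETSc
*/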

#undef __FUNCT__
#define __FUNCT__ "VecCreateGhostWithArray"
/*@C
   VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space.

   Collective on MPI_Comm

   Input Parameters:
+  comm   - the MPI communicator to use
.  n      - local vector length
.  N      - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost points
.  ghosts - global indices of ghost points (or PETSC_NULL if not needed); these do not need to be in increasing (sorted) order
-  array  - the space to store the vector values (must be at least of length n + nghost)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   Level: advanced

   Concepts: vectors^creating with array
   Concepts: vectors^ghosted

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray(), VecMPISetGhost()

@*/
PetscErrorCode VecCreateGhostWithArray(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode         ierr;
  Vec_MPI                *w;
  PetscScalar            *larray;
  IS                     from,to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart,i,*indices;

  PetscFunctionBegin;
  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost,array);CHKERRQ(ierr);
  w    = (Vec_MPI *)(*vv)->data;
  /* Create local representation */
  ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localrep);CHKERRQ(ierr);
  ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  ierr = ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localupdate);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);

  /* set local to global mapping for ghosted vector */
  ierr = PetscMalloc((n+nghost)*sizeof(PetscInt),&indices);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(*vv,&rstart,PETSC_NULL);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    indices[i] = rstart + i;
  }
  for (i=0; i<nghost; i++) {
    indices[n+i] = ghosts[i];
  }
  ierr = ISLocalToGlobalMappingCreate(comm,n+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
  ierr = VecSetLocalToGlobalMapping(*vv,ltog);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
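
/*
   Usage sketch (illustrative only, intended for exactly two processes): each
   rank owns 2 entries and ghosts one boundary entry of the other rank. The
   sizes and ghost indices are made up; error checking abbreviated.

     PetscMPIInt rank;
     PetscInt    n = 2,nghost = 1,ghosts[1];
     PetscScalar work[3];                 // n + nghost entries
     Vec         x;

     ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRQ(ierr);
     ghosts[0] = rank ? 0 : 3;            // ghost the other rank's boundary entry
     ierr = VecCreateGhostWithArray(PETSC_COMM_WORLD,n,PETSC_DECIDE,nghost,ghosts,work,&x);CHKERRQ(ierr);
     ierr = VecDestroy(&x);CHKERRQ(ierr); // work[] remains owned by the caller
*/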

#undef __FUNCT__
#define __FUNCT__ "VecCreateGhost"
/*@
   VecCreateGhost - Creates a parallel vector with ghost padding on each processor.

   Collective on MPI_Comm

   Input Parameters:
+  comm   - the MPI communicator to use
.  n      - local vector length
.  N      - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points; these do not need to be in increasing (sorted) order

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray(), VecMPISetGhost()

@*/
PetscErrorCode VecCreateGhost(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreateGhostWithArray(comm,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
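
/*
   Usage sketch (illustrative only): the typical ghost-update cycle after the
   owned values have been assembled. The local size and ghost index are made
   up; error checking abbreviated.

     Vec      x,xlocal;
     PetscInt ghosts[1] = {0};            // made-up ghost index

     ierr = VecCreateGhost(PETSC_COMM_WORLD,4,PETSC_DECIDE,1,ghosts,&x);CHKERRQ(ierr);
     // push current owned values into the ghost slots on all processes
     ierr = VecGhostUpdateBegin(x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
     ierr = VecGhostUpdateEnd(x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
     // view owned + ghost entries as one sequential vector of length n+nghost
     ierr = VecGhostGetLocalForm(x,&xlocal);CHKERRQ(ierr);
     ierr = VecGhostRestoreLocalForm(x,&xlocal);CHKERRQ(ierr);
     ierr = VecDestroy(&x);CHKERRQ(ierr);
*/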

#undef __FUNCT__
#define __FUNCT__ "VecMPISetGhost"
/*@
   VecMPISetGhost - Sets the ghost points for an MPI ghost vector

   Collective on Vec

   Input Parameters:
+  vv     - the MPI vector
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points; these do not need to be in increasing (sorted) order

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   You must call this AFTER you have set the type of the vector (with VecSetType()) and the size (with VecSetSizes()).

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode VecMPISetGhost(Vec vv,PetscInt nghost,const PetscInt ghosts[])
{
  PetscErrorCode ierr;
  PetscBool      flg;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)vv,VECMPI,&flg);CHKERRQ(ierr);
  /* if this is an already fully built VECMPI then destroy it and rebuild with ghosting */
  if (flg) {
    PetscInt               n,N;
    Vec_MPI                *w;
    PetscScalar            *larray;
    IS                     from,to;
    ISLocalToGlobalMapping ltog;
    PetscInt               rstart,i,*indices;
    MPI_Comm               comm = ((PetscObject)vv)->comm;

    n    = vv->map->n;
    N    = vv->map->N;
    ierr = (*vv->ops->destroy)(vv);CHKERRQ(ierr);
    ierr = VecSetSizes(vv,n,N);CHKERRQ(ierr);
    ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,nghost,PETSC_NULL);CHKERRQ(ierr);
    w    = (Vec_MPI *)(vv)->data;
    /* Create local representation */
    ierr = VecGetArray(vv,&larray);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(vv,w->localrep);CHKERRQ(ierr);
    ierr = VecRestoreArray(vv,&larray);CHKERRQ(ierr);

    /*
         Create scatter context for scattering (updating) ghost values
    */
    ierr = ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
    ierr = VecScatterCreate(vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(vv,w->localupdate);CHKERRQ(ierr);
    ierr = ISDestroy(&to);CHKERRQ(ierr);
    ierr = ISDestroy(&from);CHKERRQ(ierr);

    /* set local to global mapping for ghosted vector */
    ierr = PetscMalloc((n+nghost)*sizeof(PetscInt),&indices);CHKERRQ(ierr);
    ierr = VecGetOwnershipRange(vv,&rstart,PETSC_NULL);CHKERRQ(ierr);
    for (i=0; i<n; i++) {
      indices[i] = rstart + i;
    }
    for (i=0; i<nghost; i++) {
      indices[n+i] = ghosts[i];
    }
    ierr = ISLocalToGlobalMappingCreate(comm,n+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
    ierr = VecSetLocalToGlobalMapping(vv,ltog);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  } else if (vv->ops->create == VecCreate_MPI) SETERRQ(((PetscObject)vv)->comm,PETSC_ERR_ARG_WRONGSTATE,"Must set local or global size before setting ghosting");
  else if (!((PetscObject)vv)->type_name) SETERRQ(((PetscObject)vv)->comm,PETSC_ERR_ARG_WRONGSTATE,"Must set type to VECMPI before ghosting");
  PetscFunctionReturn(0);
}
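
/*
   Usage sketch (illustrative only): VecMPISetGhost() requires the type and
   sizes to be set first. The ghost count and indices are made up; error
   checking abbreviated.

     Vec      x;
     PetscInt ghosts[2] = {0,1};

     ierr = VecCreate(PETSC_COMM_WORLD,&x);CHKERRQ(ierr);
     ierr = VecSetSizes(x,4,PETSC_DECIDE);CHKERRQ(ierr);
     ierr = VecSetType(x,VECMPI);CHKERRQ(ierr);        // type and sizes must precede ghosting
     ierr = VecMPISetGhost(x,2,ghosts);CHKERRQ(ierr);
     ierr = VecDestroy(&x);CHKERRQ(ierr);
*/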

/* ------------------------------------------------------------------------------------------*/
#undef __FUNCT__
#define __FUNCT__ "VecCreateGhostBlockWithArray"
/*@C
   VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space. Indices in the ghost region are based on blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm   - the MPI communicator to use
.  bs     - block size
.  n      - local vector length
.  N      - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost blocks
.  ghosts - global indices of ghost blocks (or PETSC_NULL if not needed); counts are by block, not by individual index, and these do not need to be in increasing (sorted) order
-  array  - the space to store the vector values (must be at least of length n + nghost*bs)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (the total local size, not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

   Level: advanced

   Concepts: vectors^creating ghosted
   Concepts: vectors^creating with array

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostWithArray(), VecCreateGhostBlock()

@*/
PetscErrorCode VecCreateGhostBlockWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode         ierr;
  Vec_MPI                *w;
  PetscScalar            *larray;
  IS                     from,to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart,i,nb,*indices;

  PetscFunctionBegin;
  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  if (n % bs)                 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local size must be a multiple of block size");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost*bs,array);CHKERRQ(ierr);
  w    = (Vec_MPI *)(*vv)->data;
  /* Create local representation */
  ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,bs,n+bs*nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localrep);CHKERRQ(ierr);
  ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  ierr = ISCreateBlock(comm,bs,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localupdate);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);

  /* set local to global mapping for ghosted vector */
  nb   = n/bs;
  ierr = PetscMalloc((nb+nghost)*sizeof(PetscInt),&indices);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(*vv,&rstart,PETSC_NULL);CHKERRQ(ierr);
  for (i=0; i<nb; i++) {
    indices[i] = rstart + i*bs;
  }
  for (i=0; i<nghost; i++) {
    indices[nb+i] = ghosts[i];
  }
  ierr = ISLocalToGlobalMappingCreate(comm,nb+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
  ierr = VecSetLocalToGlobalMappingBlock(*vv,ltog);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
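
/*
   Usage sketch (illustrative only): with bs=2, n=4 (two owned blocks) and
   nghost=1 ghost block, the caller must supply n + bs*nghost = 6 scalars.
   The ghost block index below is made up; error checking abbreviated.

     PetscInt    bs = 2,n = 4,nghost = 1,ghosts[1] = {0};
     PetscScalar work[6];                 // n + bs*nghost entries
     Vec         x;

     ierr = VecCreateGhostBlockWithArray(PETSC_COMM_WORLD,bs,n,PETSC_DECIDE,nghost,ghosts,work,&x);CHKERRQ(ierr);
     ierr = VecDestroy(&x);CHKERRQ(ierr); // work[] remains owned by the caller
*/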

#undef __FUNCT__
#define __FUNCT__ "VecCreateGhostBlock"
/*@
   VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
   The indexing of the ghost points is done with blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm   - the MPI communicator to use
.  bs     - the block size
.  n      - local vector length
.  N      - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost blocks
-  ghosts - global indices of ghost blocks; counts are by block, not by individual index, and these do not need to be in increasing (sorted) order

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (the total local size, not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode VecCreateGhostBlock(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
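
/*
   Usage sketch (illustrative only): here PETSc allocates the storage, and a
   single ghost block brings in bs consecutive entries on the next update.
   The block index is made up; error checking abbreviated.

     PetscInt bs = 3,n = 6,nghost = 1,ghosts[1] = {0};
     Vec      x;

     ierr = VecCreateGhostBlock(PETSC_COMM_WORLD,bs,n,PETSC_DECIDE,nghost,ghosts,&x);CHKERRQ(ierr);
     ierr = VecGhostUpdateBegin(x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
     ierr = VecGhostUpdateEnd(x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
     ierr = VecDestroy(&x);CHKERRQ(ierr);
*/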