
/*
   This file contains routines for Parallel vector operations.
 */
#include <../src/vec/vec/impls/mpi/pvecimpl.h>   /*I  "petscvec.h"   I*/

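/*
   Both dot products below follow the same pattern: compute the local
   contribution with the sequential kernel, then combine the per-process
   partial results across the vector's communicator with MPI_Allreduce().
*/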
#undef __FUNCT__
#define __FUNCT__ "VecDot_MPI"
static PetscErrorCode VecDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecDot_Seq(xin,yin,&work);CHKERRQ(ierr);
  ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)xin));CHKERRQ(ierr);
  *z   = sum;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecTDot_MPI"
static PetscErrorCode VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecTDot_Seq(xin,yin,&work);CHKERRQ(ierr);
  ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)xin));CHKERRQ(ierr);
  *z   = sum;
  PetscFunctionReturn(0);
}

extern PetscErrorCode VecView_MPI_Draw(Vec,PetscViewer);

#undef __FUNCT__
#define __FUNCT__ "VecPlaceArray_MPI"
static PetscErrorCode VecPlaceArray_MPI(Vec vin,const PetscScalar *a)
{
  PetscErrorCode ierr;
  Vec_MPI        *v = (Vec_MPI*)vin->data;

  PetscFunctionBegin;
  if (v->unplacedarray) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
  v->unplacedarray = v->array;  /* save previous array so reset can bring it back */
  v->array         = (PetscScalar*)a;
  if (v->localrep) {
    ierr = VecPlaceArray(v->localrep,a);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

extern PetscErrorCode VecGetValues_MPI(Vec,PetscInt,const PetscInt[],PetscScalar[]);

#undef __FUNCT__
#define __FUNCT__ "VecDuplicate_MPI"
static PetscErrorCode VecDuplicate_MPI(Vec win,Vec *v)
{
  PetscErrorCode ierr;
  Vec_MPI        *vw,*w = (Vec_MPI*)win->data;
  PetscScalar    *array;

  PetscFunctionBegin;
  ierr = VecCreate(PetscObjectComm((PetscObject)win),v);CHKERRQ(ierr);
  ierr = PetscLayoutReference(win->map,&(*v)->map);CHKERRQ(ierr);

  ierr = VecCreate_MPI_Private(*v,PETSC_TRUE,w->nghost,0);CHKERRQ(ierr);
  vw   = (Vec_MPI*)(*v)->data;
  ierr = PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps));CHKERRQ(ierr);

  /* save local representation of the parallel vector (and scatter) if it exists */
  if (w->localrep) {
    ierr = VecGetArray(*v,&array);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,win->map->bs,win->map->n+w->nghost,array,&vw->localrep);CHKERRQ(ierr);
    ierr = PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps));CHKERRQ(ierr);
    ierr = VecRestoreArray(*v,&array);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(*v,vw->localrep);CHKERRQ(ierr);

    vw->localupdate = w->localupdate;
    if (vw->localupdate) {
      ierr = PetscObjectReference((PetscObject)vw->localupdate);CHKERRQ(ierr);
    }
  }

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash   = win->stash.donotstash;
  (*v)->stash.ignorenegidx = win->stash.ignorenegidx;

  ierr = PetscObjectListDuplicate(((PetscObject)win)->olist,&((PetscObject)(*v))->olist);CHKERRQ(ierr);
  ierr = PetscFunctionListDuplicate(((PetscObject)win)->qlist,&((PetscObject)(*v))->qlist);CHKERRQ(ierr);

  (*v)->map->bs   = win->map->bs;
  (*v)->bstash.bs = win->bstash.bs;
  PetscFunctionReturn(0);
}

extern PetscErrorCode VecSetOption_MPI(Vec,VecOption,PetscBool);
extern PetscErrorCode VecResetArray_MPI(Vec);

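/*
   Function table for VECMPI: operations that require communication
   (reductions, assembly, max/min) use the _MPI variants, while purely
   local operations reuse the sequential (_Seq) kernels on the owned part.
*/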
static struct _VecOps DvOps = { VecDuplicate_MPI, /* 1 */
                                VecDuplicateVecs_Default,
                                VecDestroyVecs_Default,
                                VecDot_MPI,
                                VecMDot_MPI,
                                VecNorm_MPI,
                                VecTDot_MPI,
                                VecMTDot_MPI,
                                VecScale_Seq,
                                VecCopy_Seq, /* 10 */
                                VecSet_Seq,
                                VecSwap_Seq,
                                VecAXPY_Seq,
                                VecAXPBY_Seq,
                                VecMAXPY_Seq,
                                VecAYPX_Seq,
                                VecWAXPY_Seq,
                                VecAXPBYPCZ_Seq,
                                VecPointwiseMult_Seq,
                                VecPointwiseDivide_Seq, /* 20 */
                                VecSetValues_MPI,
                                VecAssemblyBegin_MPI,
                                VecAssemblyEnd_MPI,
                                0,
                                VecGetSize_MPI,
                                VecGetSize_Seq,
                                0,
                                VecMax_MPI,
                                VecMin_MPI,
                                VecSetRandom_Seq,
                                VecSetOption_MPI,
                                VecSetValuesBlocked_MPI,
                                VecDestroy_MPI,
                                VecView_MPI,
                                VecPlaceArray_MPI,
                                VecReplaceArray_Seq,
                                VecDot_Seq,
                                VecTDot_Seq,
                                VecNorm_Seq,
                                VecMDot_Seq,
                                VecMTDot_Seq,
                                VecLoad_Default,
                                VecReciprocal_Default,
                                VecConjugate_Seq,
                                0,
                                0,
                                VecResetArray_MPI,
                                0,
                                VecMaxPointwiseDivide_Seq,
                                VecPointwiseMax_Seq,
                                VecPointwiseMaxAbs_Seq,
                                VecPointwiseMin_Seq,
                                VecGetValues_MPI,
                                0,
                                0,
                                0,
                                0,
                                0,
                                0,
                                VecStrideGather_Default,
                                VecStrideScatter_Default};

#undef __FUNCT__
#define __FUNCT__ "VecCreate_MPI_Private"
/*
    VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
    VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
    VecCreateGhostWithArray(), VecDuplicate_MPI(), and VecDuplicate_Shared()

    If alloc is true and array is NULL this routine allocates the space; otherwise
    no space is allocated.
*/
PetscErrorCode VecCreate_MPI_Private(Vec v,PetscBool alloc,PetscInt nghost,const PetscScalar array[])
{
  Vec_MPI        *s;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr           = PetscNewLog(v,Vec_MPI,&s);CHKERRQ(ierr);
  v->data        = (void*)s;
  ierr           = PetscMemcpy(v->ops,&DvOps,sizeof(DvOps));CHKERRQ(ierr);
  s->nghost      = nghost;
  v->petscnative = PETSC_TRUE;

  ierr = PetscLayoutSetUp(v->map);CHKERRQ(ierr);

  s->array           = (PetscScalar*)array;
  s->array_allocated = 0;
  if (alloc && !array) {
    PetscInt n = v->map->n+nghost;
    ierr               = PetscMalloc(n*sizeof(PetscScalar),&s->array);CHKERRQ(ierr);
    ierr               = PetscLogObjectMemory(v,n*sizeof(PetscScalar));CHKERRQ(ierr);
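    /* zero only the owned portion; the nghost padding entries are left uninitialized */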
    ierr               = PetscMemzero(s->array,v->map->n*sizeof(PetscScalar));CHKERRQ(ierr);
    s->array_allocated = s->array;
  }

  /* By default parallel vectors do not have local representation */
  s->localrep    = 0;
  s->localupdate = 0;

  v->stash.insertmode = NOT_SET_VALUES;
  /* create the stashes. The block-size for bstash is set later when
     VecSetValuesBlocked is called.
  */
  ierr = VecStashCreate_Private(PetscObjectComm((PetscObject)v),1,&v->stash);CHKERRQ(ierr);
  ierr = VecStashCreate_Private(PetscObjectComm((PetscObject)v),v->map->bs,&v->bstash);CHKERRQ(ierr);

#if defined(PETSC_HAVE_MATLAB_ENGINE)
  ierr = PetscObjectComposeFunction((PetscObject)v,"PetscMatlabEnginePut_C",VecMatlabEnginePut_Default);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)v,"PetscMatlabEngineGet_C",VecMatlabEngineGet_Default);CHKERRQ(ierr);
#endif
  ierr = PetscObjectChangeTypeName((PetscObject)v,VECMPI);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*MC
   VECMPI - VECMPI = "mpi" - The basic parallel vector

   Options Database Keys:
. -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions()

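   Example Usage (a minimal sketch; the global size is illustrative and
   CHKERRQ() error checking is omitted):
.vb
   Vec v;
   VecCreate(PETSC_COMM_WORLD,&v);
   VecSetSizes(v,PETSC_DECIDE,100);   /* 100 global entries, layout decided by PETSc */
   VecSetType(v,VECMPI);              /* or VecSetFromOptions(v) with -vec_type mpi */
   ...
   VecDestroy(&v);
.ve
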
  Level: beginner

.seealso: VecCreate(), VecSetType(), VecSetFromOptions(), VecCreateMPIWithArray(), VecType, VecCreateMPI()
M*/

#undef __FUNCT__
#define __FUNCT__ "VecCreate_MPI"
PETSC_EXTERN PetscErrorCode VecCreate_MPI(Vec vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*MC
   VECSTANDARD = "standard" - A VECSEQ on one process and VECMPI on more than one process

   Options Database Keys:
. -vec_type standard - sets the vector type to VECSTANDARD during a call to VecSetFromOptions()

  Level: beginner

.seealso: VecCreateSeq(), VecCreateMPI()
M*/

#undef __FUNCT__
#define __FUNCT__ "VecCreate_Standard"
PETSC_EXTERN PetscErrorCode VecCreate_Standard(Vec v)
{
  PetscErrorCode ierr;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)v),&size);CHKERRQ(ierr);
  if (size == 1) {
    ierr = VecSetType(v,VECSEQ);CHKERRQ(ierr);
  } else {
    ierr = VecSetType(v,VECMPI);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecCreateMPIWithArray"
/*@C
   VecCreateMPIWithArray - Creates a parallel, array-style vector,
   where the user provides the array space to store the vector values.

   Collective on MPI_Comm

   Input Parameters:
+  comm  - the MPI communicator to use
.  bs    - block size, same meaning as VecSetBlockSize()
.  n     - local vector length, cannot be PETSC_DECIDE
.  N     - global vector length (or PETSC_DECIDE to have it calculated)
-  array - the user provided array to store the vector values

   Output Parameter:
.  vv - the vector

   Notes:
   Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
   same type as an existing vector.

   If the user-provided array is NULL, then VecPlaceArray() can be used
   at a later stage to SET the array for storing the vector values.

   PETSc does NOT free the array when the vector is destroyed via VecDestroy().
   The user should not free the array until the vector is destroyed.

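   Example Usage (a minimal sketch; the local length and the user array are
   illustrative, and CHKERRQ() error checking is omitted):
.vb
   PetscScalar buf[5];                 /* storage for 5 local entries, owned by the caller */
   Vec         x;
   VecCreateMPIWithArray(PETSC_COMM_WORLD,1,5,PETSC_DECIDE,buf,&x);
   ...
   VecDestroy(&x);                     /* buf is NOT freed; the caller still owns it */
.ve
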
   Level: intermediate

   Concepts: vectors^creating with array

.seealso: VecCreateSeqWithArray(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(),
          VecCreateMPI(), VecCreateGhostWithArray(), VecPlaceArray()

@*/
PetscErrorCode  VecCreateMPIWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,const PetscScalar array[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (n == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_FALSE,0,array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecCreateGhostWithArray"
/*@C
   VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost points
.  ghosts - global indices of ghost points (or NULL if not needed), these do not need to be in increasing order (sorted)
-  array - the space to store the vector values (must be of length at least n+nghost)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   Level: advanced

   Concepts: vectors^creating with array
   Concepts: vectors^ghosted

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray(), VecMPISetGhost()

@*/
PetscErrorCode  VecCreateGhostWithArray(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode         ierr;
  Vec_MPI                *w;
  PetscScalar            *larray;
  IS                     from,to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart,i,*indices;

  PetscFunctionBegin;
  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost,array);CHKERRQ(ierr);
  w    = (Vec_MPI*)(*vv)->data;
  /* Create local representation */
  ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localrep);CHKERRQ(ierr);
  ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values
  */
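  /* w->localupdate is the scatter later applied by VecGhostUpdateBegin()/VecGhostUpdateEnd() */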
  ierr = ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localupdate);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);

  /* set local to global mapping for ghosted vector */
  ierr = PetscMalloc((n+nghost)*sizeof(PetscInt),&indices);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(*vv,&rstart,NULL);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    indices[i] = rstart + i;
  }
  for (i=0; i<nghost; i++) {
    indices[n+i] = ghosts[i];
  }
  ierr = ISLocalToGlobalMappingCreate(comm,n+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
  ierr = VecSetLocalToGlobalMapping(*vv,ltog);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecCreateGhost"
/*@
   VecCreateGhost - Creates a parallel vector with ghost padding on each processor.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points, these do not need to be in increasing order (sorted)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

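   Example Usage (a minimal sketch; the sizes and ghost indices are
   illustrative, and CHKERRQ() error checking is omitted):
.vb
   PetscInt ghosts[] = {0,9};          /* hypothetical global indices needed locally */
   Vec      v,lv;
   VecCreateGhost(PETSC_COMM_WORLD,5,PETSC_DECIDE,2,ghosts,&v);
   /* ... set values, assemble ... */
   VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);  /* fill the ghost entries */
   VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
   VecGhostGetLocalForm(v,&lv);        /* sequential vector of length n+nghost */
   /* ... read lv; ghost values sit at local positions n..n+nghost-1 ... */
   VecGhostRestoreLocalForm(v,&lv);
   VecDestroy(&v);
.ve
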
   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray(), VecMPISetGhost()

@*/
PetscErrorCode  VecCreateGhost(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreateGhostWithArray(comm,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecMPISetGhost"
/*@
   VecMPISetGhost - Sets the ghost points for an MPI ghost vector

   Collective on Vec

   Input Parameters:
+  vv - the MPI vector
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points, these do not need to be in increasing order (sorted)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   You must call this AFTER you have set the type of the vector (with VecSetType()) and the size (with VecSetSizes()).

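   Example Usage (a minimal sketch; the sizes and ghost indices are
   illustrative, and CHKERRQ() error checking is omitted):
.vb
   PetscInt ghosts[] = {0,9};          /* hypothetical global indices needed locally */
   Vec      v;
   VecCreate(PETSC_COMM_WORLD,&v);
   VecSetSizes(v,5,PETSC_DECIDE);
   VecSetType(v,VECMPI);               /* sizes and type must be set before the ghosts */
   VecMPISetGhost(v,2,ghosts);
.ve
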
   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode  VecMPISetGhost(Vec vv,PetscInt nghost,const PetscInt ghosts[])
{
  PetscErrorCode ierr;
  PetscBool      flg;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)vv,VECMPI,&flg);CHKERRQ(ierr);
  /* if this is already a fully built VECMPI then destroy it and rebuild with ghosting */
  if (flg) {
    PetscInt               n,N;
    Vec_MPI                *w;
    PetscScalar            *larray;
    IS                     from,to;
    ISLocalToGlobalMapping ltog;
    PetscInt               rstart,i,*indices;
    MPI_Comm               comm;

    ierr = PetscObjectGetComm((PetscObject)vv,&comm);CHKERRQ(ierr);
    n    = vv->map->n;
    N    = vv->map->N;
    ierr = (*vv->ops->destroy)(vv);CHKERRQ(ierr);
    ierr = VecSetSizes(vv,n,N);CHKERRQ(ierr);
    ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,nghost,NULL);CHKERRQ(ierr);
    w    = (Vec_MPI*)(vv)->data;
    /* Create local representation */
    ierr = VecGetArray(vv,&larray);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(vv,w->localrep);CHKERRQ(ierr);
    ierr = VecRestoreArray(vv,&larray);CHKERRQ(ierr);

    /*
     Create scatter context for scattering (updating) ghost values
     */
    ierr = ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
    ierr = VecScatterCreate(vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(vv,w->localupdate);CHKERRQ(ierr);
    ierr = ISDestroy(&to);CHKERRQ(ierr);
    ierr = ISDestroy(&from);CHKERRQ(ierr);

    /* set local to global mapping for ghosted vector */
    ierr = PetscMalloc((n+nghost)*sizeof(PetscInt),&indices);CHKERRQ(ierr);
    ierr = VecGetOwnershipRange(vv,&rstart,NULL);CHKERRQ(ierr);

    for (i=0; i<n; i++)      indices[i]   = rstart + i;
    for (i=0; i<nghost; i++) indices[n+i] = ghosts[i];

    ierr = ISLocalToGlobalMappingCreate(comm,n+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
    ierr = VecSetLocalToGlobalMapping(vv,ltog);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  } else if (vv->ops->create == VecCreate_MPI) SETERRQ(PetscObjectComm((PetscObject)vv),PETSC_ERR_ARG_WRONGSTATE,"Must set local or global size before setting ghosting");
  else if (!((PetscObject)vv)->type_name) SETERRQ(PetscObjectComm((PetscObject)vv),PETSC_ERR_ARG_WRONGSTATE,"Must set type to VECMPI before ghosting");
  PetscFunctionReturn(0);
}

/* ------------------------------------------------------------------------------------------*/
#undef __FUNCT__
#define __FUNCT__ "VecCreateGhostBlockWithArray"
/*@C
   VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space. Indices in the ghost region are based on blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - block size
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost blocks
.  ghosts - global indices of ghost blocks (or NULL if not needed), counts are by block not by index, these do not need to be in increasing order (sorted)
-  array - the space to store the vector values (must be of length at least n+nghost*bs)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (the total local size, not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

   Level: advanced

   Concepts: vectors^creating ghosted
   Concepts: vectors^creating with array

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostWithArray(), VecCreateGhostBlock()

@*/
PetscErrorCode  VecCreateGhostBlockWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode         ierr;
  Vec_MPI                *w;
  PetscScalar            *larray;
  IS                     from,to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart,i,nb,*indices;

  PetscFunctionBegin;
  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  if (n % bs)                 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local size must be a multiple of block size");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost*bs,array);CHKERRQ(ierr);
  w    = (Vec_MPI*)(*vv)->data;
  /* Create local representation */
  ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,bs,n+bs*nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localrep);CHKERRQ(ierr);
  ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  ierr = ISCreateBlock(comm,bs,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localupdate);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);

  /* set local to global mapping for ghosted vector */
  nb   = n/bs;
  ierr = PetscMalloc((nb+nghost)*sizeof(PetscInt),&indices);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(*vv,&rstart,NULL);CHKERRQ(ierr);

  for (i=0; i<nb; i++)      indices[i]    = rstart + i*bs;
  for (i=0; i<nghost; i++)  indices[nb+i] = ghosts[i];

  ierr = ISLocalToGlobalMappingCreate(comm,nb+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
  ierr = VecSetLocalToGlobalMappingBlock(*vv,ltog);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecCreateGhostBlock"
/*@
   VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
        The indexing of the ghost points is done with blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - the block size
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost blocks
-  ghosts - global indices of ghost blocks, counts are by block, not by individual index, these do not need to be in increasing order (sorted)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (the total local size, not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

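   Example Usage (a minimal sketch; the block size, local length, and ghost
   block indices are illustrative, and CHKERRQ() error checking is omitted):
.vb
   PetscInt ghosts[] = {0,3};          /* hypothetical BLOCK indices, not point indices */
   Vec      v;
   /* bs=2, n=6 local entries (3 local blocks), 2 ghost blocks = 4 ghost entries */
   VecCreateGhostBlock(PETSC_COMM_WORLD,2,6,PETSC_DECIDE,2,ghosts,&v);
   ...
   VecDestroy(&v);
.ve
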
   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode  VecCreateGhostBlock(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}