xref: /petsc/src/vec/vec/impls/mpi/pbvec.c (revision df0c820a74ebd738345a269415d8092a0843fdf3)
1 #define PETSCVEC_DLL
2 /*
3    This file contains routines for Parallel vector operations.
4  */
5 #include "../src/vec/vec/impls/mpi/pvecimpl.h"   /*I  "petscvec.h"   I*/
6 
7 #undef __FUNCT__
8 #define __FUNCT__ "VecDot_MPI"
9 PetscErrorCode VecDot_MPI(Vec xin,Vec yin,PetscScalar *z)
10 {
11   PetscScalar    sum,work;
12   PetscErrorCode ierr;
13 
14   PetscFunctionBegin;
15   ierr = VecDot_Seq(xin,yin,&work);CHKERRQ(ierr);
16   ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,((PetscObject)xin)->comm);CHKERRQ(ierr);
17   *z = sum;
18   PetscFunctionReturn(0);
19 }
20 
21 #undef __FUNCT__
22 #define __FUNCT__ "VecTDot_MPI"
23 PetscErrorCode VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z)
24 {
25   PetscScalar    sum,work;
26   PetscErrorCode ierr;
27 
28   PetscFunctionBegin;
29   ierr = VecTDot_Seq(xin,yin,&work);CHKERRQ(ierr);
30   ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,((PetscObject)xin)->comm);CHKERRQ(ierr);
31   *z   = sum;
32   PetscFunctionReturn(0);
33 }
34 
35 EXTERN_C_BEGIN
36 EXTERN PetscErrorCode VecView_MPI_Draw(Vec,PetscViewer);
37 EXTERN_C_END
38 
39 #undef __FUNCT__
40 #define __FUNCT__ "VecPlaceArray_MPI"
41 PetscErrorCode VecPlaceArray_MPI(Vec vin,const PetscScalar *a)
42 {
43   PetscErrorCode ierr;
44   Vec_MPI        *v = (Vec_MPI *)vin->data;
45 
46   PetscFunctionBegin;
47   if (v->unplacedarray) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
48   v->unplacedarray = v->array;  /* save previous array so reset can bring it back */
49   v->array = (PetscScalar *)a;
50   if (v->localrep) {
51     ierr = VecPlaceArray(v->localrep,a);CHKERRQ(ierr);
52   }
53   PetscFunctionReturn(0);
54 }
55 
56 EXTERN PetscErrorCode VecGetValues_MPI(Vec,PetscInt,const PetscInt [],PetscScalar []);
57 
58 #undef __FUNCT__
59 #define __FUNCT__ "VecDuplicate_MPI"
60 static PetscErrorCode VecDuplicate_MPI(Vec win,Vec *v)
61 {
62   PetscErrorCode ierr;
63   Vec_MPI        *vw,*w = (Vec_MPI *)win->data;
64   PetscScalar    *array;
65 
66   PetscFunctionBegin;
67   ierr = VecCreate(((PetscObject)win)->comm,v);CHKERRQ(ierr);
68 
69   /* use the map that already exists in win */
70   ierr = PetscLayoutDestroy((*v)->map);CHKERRQ(ierr);
71   (*v)->map = win->map;
72   win->map->refcnt++;
73 
74   ierr = VecCreate_MPI_Private(*v,PETSC_TRUE,w->nghost,0);CHKERRQ(ierr);
75   vw   = (Vec_MPI *)(*v)->data;
76   ierr = PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps));CHKERRQ(ierr);
77 
78   /* save local representation of the parallel vector (and scatter) if it exists */
79   if (w->localrep) {
80     ierr = VecGetArrayPrivate(*v,&array);CHKERRQ(ierr);
81     ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,win->map->n+w->nghost,array,&vw->localrep);CHKERRQ(ierr);
82     ierr = PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps));CHKERRQ(ierr);
83     ierr = VecRestoreArrayPrivate(*v,&array);CHKERRQ(ierr);
84     ierr = PetscLogObjectParent(*v,vw->localrep);CHKERRQ(ierr);
85     vw->localupdate = w->localupdate;
86     if (vw->localupdate) {
87       ierr = PetscObjectReference((PetscObject)vw->localupdate);CHKERRQ(ierr);
88     }
89   }
90 
91   /* New vector should inherit stashing property of parent */
92   (*v)->stash.donotstash = win->stash.donotstash;
93   (*v)->stash.ignorenegidx = win->stash.ignorenegidx;
94 
95   ierr = PetscOListDuplicate(((PetscObject)win)->olist,&((PetscObject)(*v))->olist);CHKERRQ(ierr);
96   ierr = PetscFListDuplicate(((PetscObject)win)->qlist,&((PetscObject)(*v))->qlist);CHKERRQ(ierr);
97   if (win->mapping) {
98     ierr = PetscObjectReference((PetscObject)win->mapping);CHKERRQ(ierr);
99     (*v)->mapping = win->mapping;
100   }
101   if (win->bmapping) {
102     ierr = PetscObjectReference((PetscObject)win->bmapping);CHKERRQ(ierr);
103     (*v)->bmapping = win->bmapping;
104   }
105   (*v)->map->bs    = win->map->bs;
106   (*v)->bstash.bs = win->bstash.bs;
107 
108   PetscFunctionReturn(0);
109 }
110 
111 extern PetscErrorCode VecSetOption_MPI(Vec,VecOption,PetscBool);
112 extern PetscErrorCode VecResetArray_MPI(Vec);
113 
114 static struct _VecOps DvOps = { VecDuplicate_MPI, /* 1 */
115             VecDuplicateVecs_Default,
116             VecDestroyVecs_Default,
117             VecDot_MPI,
118             VecMDot_MPI,
119             VecNorm_MPI,
120             VecTDot_MPI,
121             VecMTDot_MPI,
122             VecScale_Seq,
123             VecCopy_Seq, /* 10 */
124             VecSet_Seq,
125             VecSwap_Seq,
126             VecAXPY_Seq,
127             VecAXPBY_Seq,
128             VecMAXPY_Seq,
129             VecAYPX_Seq,
130             VecWAXPY_Seq,
131             VecAXPBYPCZ_Seq,
132             VecPointwiseMult_Seq,
133             VecPointwiseDivide_Seq,
134             VecSetValues_MPI, /* 20 */
135             VecAssemblyBegin_MPI,
136             VecAssemblyEnd_MPI,
137             VecGetArray_Seq,
138             VecGetSize_MPI,
139             VecGetSize_Seq,
140             VecRestoreArray_Seq,
141             VecMax_MPI,
142             VecMin_MPI,
143             VecSetRandom_Seq,
144             VecSetOption_MPI,
145             VecSetValuesBlocked_MPI,
146             VecDestroy_MPI,
147             VecView_MPI,
148             VecPlaceArray_MPI,
149             VecReplaceArray_Seq,
150             VecDot_Seq,
151             VecTDot_Seq,
152             VecNorm_Seq,
153             VecMDot_Seq,
154             VecMTDot_Seq,
155             VecLoad_Default,
156             VecReciprocal_Default,
157             VecConjugate_Seq,
158             0,
159             0,
160             VecResetArray_MPI,
161             0,
162             VecMaxPointwiseDivide_Seq,
163             VecPointwiseMax_Seq,
164             VecPointwiseMaxAbs_Seq,
165             VecPointwiseMin_Seq,
166             VecGetValues_MPI,
167             0,
168             0,
169             0,
170             0,
171             0,
172             0,
173             VecStrideGather_Default,
174             VecStrideScatter_Default
175 };
176 
177 #undef __FUNCT__
178 #define __FUNCT__ "VecCreate_MPI_Private"
179 /*
180     VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
181     VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
182     VecCreateGhostWithArray(), VecDuplicate_MPI(), and VecDuplicate_Shared()
183 
184     If alloc is true and array is PETSC_NULL then this routine allocates the array space;
185     otherwise no space is allocated and the user-provided array (if any) is used directly.
186 */
187 PetscErrorCode VecCreate_MPI_Private(Vec v,PetscBool  alloc,PetscInt nghost,const PetscScalar array[])
188 {
189   Vec_MPI        *s;
190   PetscErrorCode ierr;
191 
192   PetscFunctionBegin;
193 
194   ierr           = PetscNewLog(v,Vec_MPI,&s);CHKERRQ(ierr);
195   v->data        = (void*)s;
196   ierr           = PetscMemcpy(v->ops,&DvOps,sizeof(DvOps));CHKERRQ(ierr);
197   s->nghost      = nghost;
198   v->mapping     = 0;
199   v->bmapping    = 0;
200   v->petscnative = PETSC_TRUE;
201 
202   if (v->map->bs == -1) v->map->bs = 1;
203   ierr = PetscLayoutSetUp(v->map);CHKERRQ(ierr);
204   s->array           = (PetscScalar *)array;
205   s->array_allocated = 0;
206   if (alloc && !array) {
207     PetscInt n         = v->map->n+nghost;
208     ierr               = PetscMalloc(n*sizeof(PetscScalar),&s->array);CHKERRQ(ierr);
209     ierr               = PetscLogObjectMemory(v,n*sizeof(PetscScalar));CHKERRQ(ierr);
210     ierr               = PetscMemzero(s->array,v->map->n*sizeof(PetscScalar));CHKERRQ(ierr);
211     s->array_allocated = s->array;
212   }
213 
214   /* By default parallel vectors do not have local representation */
215   s->localrep    = 0;
216   s->localupdate = 0;
217 
218   v->stash.insertmode  = NOT_SET_VALUES;
219   /* create the stashes. The block-size for bstash is set later when
220      VecSetValuesBlocked is called.
221   */
222   ierr = VecStashCreate_Private(((PetscObject)v)->comm,1,&v->stash);CHKERRQ(ierr);
223   ierr = VecStashCreate_Private(((PetscObject)v)->comm,v->map->bs,&v->bstash);CHKERRQ(ierr);
224 
225 #if defined(PETSC_HAVE_MATLAB_ENGINE)
226   ierr = PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEnginePut_C","VecMatlabEnginePut_Default",VecMatlabEnginePut_Default);CHKERRQ(ierr);
227   ierr = PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEngineGet_C","VecMatlabEngineGet_Default",VecMatlabEngineGet_Default);CHKERRQ(ierr);
228 #endif
229   ierr = PetscObjectChangeTypeName((PetscObject)v,VECMPI);CHKERRQ(ierr);
230   PetscFunctionReturn(0);
231 }
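/*
   A sketch of the calling pattern used by the public creation routines further below
   (illustrative only, with Vec v, MPI_Comm comm, and sizes n, N already defined): the caller
   creates the Vec, sets its sizes, and then invokes VecCreate_MPI_Private() with the allocation
   flag, the ghost count, and an optional user-supplied array (PETSC_NULL here, so the space is
   allocated and zeroed by this routine).

     ierr = VecCreate(comm,&v);CHKERRQ(ierr);
     ierr = VecSetSizes(v,n,N);CHKERRQ(ierr);
     ierr = VecCreate_MPI_Private(v,PETSC_TRUE,0,PETSC_NULL);CHKERRQ(ierr);
*/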
232 
233 /*MC
234    VECMPI - VECMPI = "mpi" - The basic parallel vector
235 
236    Options Database Keys:
237 . -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions()
238 
239   Level: beginner
240 
241 .seealso: VecCreate(), VecSetType(), VecSetFromOptions(), VecCreateMPIWithArray(), VecType, VecCreateMPI()
242 M*/
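/*
   A minimal usage sketch (illustrative only, not part of this file): create a vector whose type
   is chosen at run time, so that -vec_type mpi selects VECMPI via VecSetFromOptions().  The
   global size 100 is an arbitrary value for the example; destroy calls take the object itself,
   matching the convention used in this revision (see ISDestroy() below).

     Vec            x;
     PetscErrorCode ierr;

     ierr = VecCreate(PETSC_COMM_WORLD,&x);CHKERRQ(ierr);
     ierr = VecSetSizes(x,PETSC_DECIDE,100);CHKERRQ(ierr);
     ierr = VecSetFromOptions(x);CHKERRQ(ierr);
     ierr = VecSet(x,1.0);CHKERRQ(ierr);
     ierr = VecDestroy(x);CHKERRQ(ierr);
*/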
243 
244 EXTERN_C_BEGIN
245 #undef __FUNCT__
246 #define __FUNCT__ "VecCreate_MPI"
247 PetscErrorCode PETSCVEC_DLLEXPORT VecCreate_MPI(Vec vv)
248 {
249   PetscErrorCode ierr;
250 
251   PetscFunctionBegin;
252   ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,0,0);CHKERRQ(ierr);
253   PetscFunctionReturn(0);
254 }
255 EXTERN_C_END
256 
257 /*MC
258    VECSTANDARD = "standard" - A VECSEQ on one process and VECMPI on more than one process
259 
260    Options Database Keys:
261 . -vec_type standard - sets the vector type to VECSTANDARD during a call to VecSetFromOptions()
262 
263   Level: beginner
264 
265 .seealso: VecCreateSeq(), VecCreateMPI()
266 M*/
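/*
   A minimal usage sketch (illustrative only): selecting VECSTANDARD explicitly, which becomes
   VECSEQ on one process and VECMPI on several, as implemented by VecCreate_Standard() below.
   The global size 100 is an arbitrary value for the example.

     Vec            y;
     PetscErrorCode ierr;

     ierr = VecCreate(PETSC_COMM_WORLD,&y);CHKERRQ(ierr);
     ierr = VecSetSizes(y,PETSC_DECIDE,100);CHKERRQ(ierr);
     ierr = VecSetType(y,VECSTANDARD);CHKERRQ(ierr);
     ierr = VecDestroy(y);CHKERRQ(ierr);
*/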
267 
268 EXTERN_C_BEGIN
269 #undef __FUNCT__
270 #define __FUNCT__ "VecCreate_Standard"
271 PetscErrorCode PETSCVEC_DLLEXPORT VecCreate_Standard(Vec v)
272 {
273   PetscErrorCode ierr;
274   PetscMPIInt    size;
275 
276   PetscFunctionBegin;
277   ierr = MPI_Comm_size(((PetscObject)v)->comm,&size);CHKERRQ(ierr);
278   if (size == 1) {
279     ierr = VecSetType(v,VECSEQ);CHKERRQ(ierr);
280   } else {
281     ierr = VecSetType(v,VECMPI);CHKERRQ(ierr);
282   }
283   PetscFunctionReturn(0);
284 }
285 EXTERN_C_END
286 
287 
288 #undef __FUNCT__
289 #define __FUNCT__ "VecCreateMPIWithArray"
290 /*@C
291    VecCreateMPIWithArray - Creates a parallel, array-style vector,
292    where the user provides the array space to store the vector values.
293 
294    Collective on MPI_Comm
295 
296    Input Parameters:
297 +  comm  - the MPI communicator to use
298 .  n     - local vector length, cannot be PETSC_DECIDE
299 .  N     - global vector length (or PETSC_DECIDE to have it calculated)
300 -  array - the user provided array to store the vector values
301 
302    Output Parameter:
303 .  vv - the vector
304 
305    Notes:
306    Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
307    same type as an existing vector.
308 
309    If the user-provided array is PETSC_NULL, then VecPlaceArray() can be used
310    at a later stage to SET the array for storing the vector values.
311 
312    PETSc does NOT free the array when the vector is destroyed via VecDestroy().
313    The user should not free the array until the vector is destroyed.
314 
315    Level: intermediate
316 
317    Concepts: vectors^creating with array
318 
319 .seealso: VecCreateSeqWithArray(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(),
320           VecCreateMPI(), VecCreateGhostWithArray(), VecPlaceArray()
321 
322 @*/
323 PetscErrorCode PETSCVEC_DLLEXPORT VecCreateMPIWithArray(MPI_Comm comm,PetscInt n,PetscInt N,const PetscScalar array[],Vec *vv)
324 {
325   PetscErrorCode ierr;
326 
327   PetscFunctionBegin;
328   if (n == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
329   ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
330   ierr = VecCreate(comm,vv);CHKERRQ(ierr);
331   ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
332   ierr = VecCreate_MPI_Private(*vv,PETSC_FALSE,0,array);CHKERRQ(ierr);
333   PetscFunctionReturn(0);
334 }
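/*
   A minimal usage sketch for VecCreateMPIWithArray() (illustrative only): the caller owns the
   storage, so PETSc never frees it and it must outlive the vector.  The local length 5 is an
   arbitrary value for the example; per the Notes above, passing PETSC_NULL and calling
   VecPlaceArray() later is also possible.

     PetscScalar    vals[5];
     Vec            x;
     PetscErrorCode ierr;

     ierr = VecCreateMPIWithArray(PETSC_COMM_WORLD,5,PETSC_DECIDE,vals,&x);CHKERRQ(ierr);
     ierr = VecSet(x,2.0);CHKERRQ(ierr);
     ierr = VecDestroy(x);CHKERRQ(ierr);
*/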
335 
336 #undef __FUNCT__
337 #define __FUNCT__ "VecCreateGhostWithArray"
338 /*@C
339    VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
340    the caller allocates the array space.
341 
342    Collective on MPI_Comm
343 
344    Input Parameters:
345 +  comm - the MPI communicator to use
346 .  n - local vector length
347 .  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
348 .  nghost - number of local ghost points
349 .  ghosts - global indices of ghost points (or PETSC_NULL if not needed)
350 -  array - the space to store the vector values (must be of length at least n + nghost)
351 
352    Output Parameter:
353 .  vv - the global vector representation (without ghost points as part of vector)
354 
355    Notes:
356    Use VecGhostGetLocalForm() to access the local, ghosted representation
357    of the vector.
358 
359    This also automatically sets the local-to-global mapping (ISLocalToGlobalMapping) for this vector.
360 
361    Level: advanced
362 
363    Concepts: vectors^creating with array
364    Concepts: vectors^ghosted
365 
366 .seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
367           VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
368           VecCreateGhostBlock(), VecCreateGhostBlockWithArray()
369 
370 @*/
371 PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostWithArray(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
372 {
373   PetscErrorCode         ierr;
374   Vec_MPI                *w;
375   PetscScalar            *larray;
376   IS                     from,to;
377   ISLocalToGlobalMapping ltog;
378   PetscInt               rstart,i,*indices;
379 
380   PetscFunctionBegin;
381   *vv = 0;
382 
383   if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
384   if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
385   if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
386   ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
387   /* Create global representation */
388   ierr = VecCreate(comm,vv);CHKERRQ(ierr);
389   ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
390   ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost,array);CHKERRQ(ierr);
391   w    = (Vec_MPI *)(*vv)->data;
392   /* Create local representation */
393   ierr = VecGetArrayPrivate(*vv,&larray);CHKERRQ(ierr);
394   ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
395   ierr = PetscLogObjectParent(*vv,w->localrep);CHKERRQ(ierr);
396   ierr = VecRestoreArrayPrivate(*vv,&larray);CHKERRQ(ierr);
397 
398   /*
399        Create scatter context for scattering (updating) ghost values
400   */
401   ierr = ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
402   ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
403   ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
404   ierr = PetscLogObjectParent(*vv,w->localupdate);CHKERRQ(ierr);
405   ierr = ISDestroy(to);CHKERRQ(ierr);
406   ierr = ISDestroy(from);CHKERRQ(ierr);
407 
408   /* set local to global mapping for ghosted vector */
409   ierr = PetscMalloc((n+nghost)*sizeof(PetscInt),&indices);CHKERRQ(ierr);
410   ierr = VecGetOwnershipRange(*vv,&rstart,PETSC_NULL);CHKERRQ(ierr);
411   for (i=0; i<n; i++) {
412     indices[i] = rstart + i;
413   }
414   for (i=0; i<nghost; i++) {
415     indices[n+i] = ghosts[i];
416   }
417   ierr = ISLocalToGlobalMappingCreate(comm,n+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
418   ierr = VecSetLocalToGlobalMapping(*vv,ltog);CHKERRQ(ierr);
419   ierr = ISLocalToGlobalMappingDestroy(ltog);CHKERRQ(ierr);
420   PetscFunctionReturn(0);
421 }
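/*
   A minimal usage sketch for VecCreateGhostWithArray() (illustrative only): the caller supplies
   storage of length at least n + nghost.  Here n = 4 owned entries and nghost = 2 ghost entries;
   the ghost indices below are placeholders and would normally be global indices owned by other
   processes.

     PetscInt       nghost    = 2;
     PetscInt       ghosts[2] = {0,1};
     PetscScalar    vals[6];
     Vec            x;
     PetscErrorCode ierr;

     ierr = VecCreateGhostWithArray(PETSC_COMM_WORLD,4,PETSC_DECIDE,nghost,ghosts,vals,&x);CHKERRQ(ierr);
     ierr = VecSet(x,1.0);CHKERRQ(ierr);
     ierr = VecGhostUpdateBegin(x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
     ierr = VecGhostUpdateEnd(x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
     ierr = VecDestroy(x);CHKERRQ(ierr);
*/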
422 
423 #undef __FUNCT__
424 #define __FUNCT__ "VecCreateGhost"
425 /*@
426    VecCreateGhost - Creates a parallel vector with ghost padding on each processor.
427 
428    Collective on MPI_Comm
429 
430    Input Parameters:
431 +  comm - the MPI communicator to use
432 .  n - local vector length
433 .  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
434 .  nghost - number of local ghost points
435 -  ghosts - global indices of ghost points
436 
437    Output Parameter:
438 .  vv - the global vector representation (without ghost points as part of vector)
439 
440    Notes:
441    Use VecGhostGetLocalForm() to access the local, ghosted representation
442    of the vector.
443 
444    This also automatically sets the local-to-global mapping (ISLocalToGlobalMapping) for this vector.
445 
446    Level: advanced
447 
448    Concepts: vectors^ghosted
449 
450 .seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
451           VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
452           VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
453           VecCreateGhostBlock(), VecCreateGhostBlockWithArray()
454 
455 @*/
456 PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhost(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
457 {
458   PetscErrorCode ierr;
459 
460   PetscFunctionBegin;
461   ierr = VecCreateGhostWithArray(comm,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
462   PetscFunctionReturn(0);
463 }
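/*
   A minimal usage sketch for VecCreateGhost() (illustrative only): after filling the global
   vector, the ghost entries are refreshed with VecGhostUpdateBegin/End() and can then be read
   through the local form.  The sizes and ghost indices are placeholders.

     PetscInt       ghosts[2] = {0,1};
     Vec            x,xlocal;
     PetscErrorCode ierr;

     ierr = VecCreateGhost(PETSC_COMM_WORLD,4,PETSC_DECIDE,2,ghosts,&x);CHKERRQ(ierr);
     ierr = VecSet(x,1.0);CHKERRQ(ierr);
     ierr = VecGhostUpdateBegin(x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
     ierr = VecGhostUpdateEnd(x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
     ierr = VecGhostGetLocalForm(x,&xlocal);CHKERRQ(ierr);
     ierr = VecGhostRestoreLocalForm(x,&xlocal);CHKERRQ(ierr);
     ierr = VecDestroy(x);CHKERRQ(ierr);
*/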
464 
465 
466 /* ------------------------------------------------------------------------------------------*/
467 #undef __FUNCT__
468 #define __FUNCT__ "VecCreateGhostBlockWithArray"
469 /*@C
470    VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
471    the caller allocates the array space. Indices in the ghost region are based on blocks.
472 
473    Collective on MPI_Comm
474 
475    Input Parameters:
476 +  comm - the MPI communicator to use
477 .  bs - block size
478 .  n - local vector length
479 .  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
480 .  nghost - number of local ghost blocks
481 .  ghosts - global indices of ghost blocks (or PETSC_NULL if not needed); indices are by block, not by individual entry
482 -  array - the space to store the vector values (must be of length at least n + nghost*bs)
483 
484    Output Parameter:
485 .  vv - the global vector representation (without ghost points as part of vector)
486 
487    Notes:
488    Use VecGhostGetLocalForm() to access the local, ghosted representation
489    of the vector.
490 
491    n is the local vector size (total local size not the number of blocks) while nghost
492    is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
493    portion is bs*nghost
494 
495    Level: advanced
496 
497    Concepts: vectors^creating ghosted
498    Concepts: vectors^creating with array
499 
500 .seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
501           VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
502           VecCreateGhostWithArray(), VecCreateGhostBlock()
503 
504 @*/
505 PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostBlockWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
506 {
507   PetscErrorCode ierr;
508   Vec_MPI        *w;
509   PetscScalar    *larray;
510   IS             from,to;
511   ISLocalToGlobalMapping ltog;
512   PetscInt       rstart,i,nb,*indices;
513 
514   PetscFunctionBegin;
515   *vv = 0;
516 
517   if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
518   if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
519   if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
520   if (n % bs)                 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local size must be a multiple of block size");
521   ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
522   /* Create global representation */
523   ierr = VecCreate(comm,vv);CHKERRQ(ierr);
524   ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
525   ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost*bs,array);CHKERRQ(ierr);
526   ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
527   w    = (Vec_MPI *)(*vv)->data;
528   /* Create local representation */
529   ierr = VecGetArrayPrivate(*vv,&larray);CHKERRQ(ierr);
530   ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,n+bs*nghost,larray,&w->localrep);CHKERRQ(ierr);
531   ierr = VecSetBlockSize(w->localrep,bs);CHKERRQ(ierr);
532   ierr = PetscLogObjectParent(*vv,w->localrep);CHKERRQ(ierr);
533   ierr = VecRestoreArrayPrivate(*vv,&larray);CHKERRQ(ierr);
534 
535   /*
536        Create scatter context for scattering (updating) ghost values
537   */
538   ierr = ISCreateBlock(comm,bs,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
539   ierr = ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);CHKERRQ(ierr);
540   ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
541   ierr = PetscLogObjectParent(*vv,w->localupdate);CHKERRQ(ierr);
542   ierr = ISDestroy(to);CHKERRQ(ierr);
543   ierr = ISDestroy(from);CHKERRQ(ierr);
544 
545   /* set local to global mapping for ghosted vector */
546   nb = n/bs;
547   ierr = PetscMalloc((nb+nghost)*sizeof(PetscInt),&indices);CHKERRQ(ierr);
548   ierr = VecGetOwnershipRange(*vv,&rstart,PETSC_NULL);CHKERRQ(ierr);
549   for (i=0; i<nb; i++) {
550     indices[i] = rstart + i*bs;
551   }
552   for (i=0; i<nghost; i++) {
553     indices[nb+i] = ghosts[i];
554   }
555   ierr = ISLocalToGlobalMappingCreate(comm,nb+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
556   ierr = VecSetLocalToGlobalMappingBlock(*vv,ltog);CHKERRQ(ierr);
557   ierr = ISLocalToGlobalMappingDestroy(ltog);CHKERRQ(ierr);
558   PetscFunctionReturn(0);
559 }
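/*
   A minimal usage sketch for VecCreateGhostBlockWithArray() (illustrative only): with bs = 2,
   n = 4 local entries (2 blocks) and nghost = 1 ghost block, the user array must hold at least
   n + bs*nghost = 6 entries.  The ghost block index is a placeholder.

     PetscInt       bghosts[1] = {0};
     PetscScalar    vals[6];
     Vec            x;
     PetscErrorCode ierr;

     ierr = VecCreateGhostBlockWithArray(PETSC_COMM_WORLD,2,4,PETSC_DECIDE,1,bghosts,vals,&x);CHKERRQ(ierr);
     ierr = VecSet(x,1.0);CHKERRQ(ierr);
     ierr = VecGhostUpdateBegin(x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
     ierr = VecGhostUpdateEnd(x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
     ierr = VecDestroy(x);CHKERRQ(ierr);
*/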
560 
561 #undef __FUNCT__
562 #define __FUNCT__ "VecCreateGhostBlock"
563 /*@
564    VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
565         The indexing of the ghost points is done by blocks.
566 
567    Collective on MPI_Comm
568 
569    Input Parameters:
570 +  comm - the MPI communicator to use
571 .  bs - the block size
572 .  n - local vector length
573 .  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
574 .  nghost - number of local ghost blocks
575 -  ghosts - global indices of ghost blocks; indices are by block, not by individual entry
576 
577    Output Parameter:
578 .  vv - the global vector representation (without ghost points as part of vector)
579 
580    Notes:
581    Use VecGhostGetLocalForm() to access the local, ghosted representation
582    of the vector.
583 
584    n is the local vector size (total local size not the number of blocks) while nghost
585    is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
586    portion is bs*nghost
587 
588    Level: advanced
589 
590    Concepts: vectors^ghosted
591 
592 .seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
593           VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
594           VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecCreateGhostBlockWithArray()
595 
596 @*/
597 PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostBlock(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
598 {
599   PetscErrorCode ierr;
600 
601   PetscFunctionBegin;
602   ierr = VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
603   PetscFunctionReturn(0);
604 }
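/*
   A minimal usage sketch for VecCreateGhostBlock() (illustrative only): block size 2, two local
   blocks (n = 4 entries), and one ghost block whose index is a placeholder.  The same vector
   could also be updated with ADD_VALUES and SCATTER_REVERSE to accumulate ghost contributions
   back into the owning processes.

     PetscInt       bghosts[1] = {0};
     Vec            x;
     PetscErrorCode ierr;

     ierr = VecCreateGhostBlock(PETSC_COMM_WORLD,2,4,PETSC_DECIDE,1,bghosts,&x);CHKERRQ(ierr);
     ierr = VecSet(x,1.0);CHKERRQ(ierr);
     ierr = VecGhostUpdateBegin(x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
     ierr = VecGhostUpdateEnd(x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
     ierr = VecDestroy(x);CHKERRQ(ierr);
*/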
605