/* xref: /petsc/src/vec/vec/impls/mpi/pbvec.c (revision 519f805a543c2a7f195bcba21173bb2cdfadb956) */

/*
   This file contains routines for Parallel vector operations.
 */
#include <../src/vec/vec/impls/mpi/pvecimpl.h>   /*I  "petscvec.h"   I*/

#undef __FUNCT__
#define __FUNCT__ "VecDot_MPI"
PetscErrorCode VecDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecDot_Seq(xin,yin,&work);CHKERRQ(ierr);
  ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,((PetscObject)xin)->comm);CHKERRQ(ierr);
  *z = sum;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecTDot_MPI"
PetscErrorCode VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecTDot_Seq(xin,yin,&work);CHKERRQ(ierr);
  ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,((PetscObject)xin)->comm);CHKERRQ(ierr);
  *z   = sum;
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
extern PetscErrorCode VecView_MPI_Draw(Vec,PetscViewer);
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "VecPlaceArray_MPI"
PetscErrorCode VecPlaceArray_MPI(Vec vin,const PetscScalar *a)
{
  PetscErrorCode ierr;
  Vec_MPI        *v = (Vec_MPI *)vin->data;

  PetscFunctionBegin;
  if (v->unplacedarray) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
  v->unplacedarray = v->array;  /* save previous array so reset can bring it back */
  v->array = (PetscScalar *)a;
  if (v->localrep) {
    ierr = VecPlaceArray(v->localrep,a);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

extern PetscErrorCode VecGetValues_MPI(Vec,PetscInt,const PetscInt [],PetscScalar []);

#undef __FUNCT__
#define __FUNCT__ "VecDuplicate_MPI"
static PetscErrorCode VecDuplicate_MPI(Vec win,Vec *v)
{
  PetscErrorCode ierr;
  Vec_MPI        *vw,*w = (Vec_MPI *)win->data;
  PetscScalar    *array;

  PetscFunctionBegin;
  ierr = VecCreate(((PetscObject)win)->comm,v);CHKERRQ(ierr);
  ierr = PetscLayoutReference(win->map,&(*v)->map);CHKERRQ(ierr);

  ierr = VecCreate_MPI_Private(*v,PETSC_TRUE,w->nghost,0);CHKERRQ(ierr);
  vw   = (Vec_MPI *)(*v)->data;
  ierr = PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps));CHKERRQ(ierr);

  /* save local representation of the parallel vector (and scatter) if it exists */
  if (w->localrep) {
    ierr = VecGetArray(*v,&array);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,win->map->bs,win->map->n+w->nghost,array,&vw->localrep);CHKERRQ(ierr);
    ierr = PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps));CHKERRQ(ierr);
    ierr = VecRestoreArray(*v,&array);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(*v,vw->localrep);CHKERRQ(ierr);
    vw->localupdate = w->localupdate;
    if (vw->localupdate) {
      ierr = PetscObjectReference((PetscObject)vw->localupdate);CHKERRQ(ierr);
    }
  }

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash = win->stash.donotstash;
  (*v)->stash.ignorenegidx = win->stash.ignorenegidx;

  ierr = PetscObjectListDuplicate(((PetscObject)win)->olist,&((PetscObject)(*v))->olist);CHKERRQ(ierr);
  ierr = PetscFunctionListDuplicate(((PetscObject)win)->qlist,&((PetscObject)(*v))->qlist);CHKERRQ(ierr);
  (*v)->map->bs    = win->map->bs;
  (*v)->bstash.bs = win->bstash.bs;

  PetscFunctionReturn(0);
}

extern PetscErrorCode VecSetOption_MPI(Vec,VecOption,PetscBool);
extern PetscErrorCode VecResetArray_MPI(Vec);

static struct _VecOps DvOps = { VecDuplicate_MPI, /* 1 */
            VecDuplicateVecs_Default,
            VecDestroyVecs_Default,
            VecDot_MPI,
            VecMDot_MPI,
            VecNorm_MPI,
            VecTDot_MPI,
            VecMTDot_MPI,
            VecScale_Seq,
            VecCopy_Seq, /* 10 */
            VecSet_Seq,
            VecSwap_Seq,
            VecAXPY_Seq,
            VecAXPBY_Seq,
            VecMAXPY_Seq,
            VecAYPX_Seq,
            VecWAXPY_Seq,
            VecAXPBYPCZ_Seq,
            VecPointwiseMult_Seq,
            VecPointwiseDivide_Seq,
            VecSetValues_MPI, /* 20 */
            VecAssemblyBegin_MPI,
            VecAssemblyEnd_MPI,
            0,
            VecGetSize_MPI,
            VecGetSize_Seq,
            0,
            VecMax_MPI,
            VecMin_MPI,
            VecSetRandom_Seq,
            VecSetOption_MPI,
            VecSetValuesBlocked_MPI,
            VecDestroy_MPI,
            VecView_MPI,
            VecPlaceArray_MPI,
            VecReplaceArray_Seq,
            VecDot_Seq,
            VecTDot_Seq,
            VecNorm_Seq,
            VecMDot_Seq,
            VecMTDot_Seq,
            VecLoad_Default,
            VecReciprocal_Default,
            VecConjugate_Seq,
            0,
            0,
            VecResetArray_MPI,
            0,
            VecMaxPointwiseDivide_Seq,
            VecPointwiseMax_Seq,
            VecPointwiseMaxAbs_Seq,
            VecPointwiseMin_Seq,
            VecGetValues_MPI,
            0,
            0,
            0,
            0,
            0,
            0,
            VecStrideGather_Default,
            VecStrideScatter_Default
};

#undef __FUNCT__
#define __FUNCT__ "VecCreate_MPI_Private"
/*
    VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
    VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
    VecCreateGhostWithArray(), VecDuplicate_MPI(), and VecDuplicate_Shared().

    If alloc is PETSC_TRUE and array is PETSC_NULL then this routine allocates the space; otherwise
    no space is allocated.
*/
PetscErrorCode VecCreate_MPI_Private(Vec v,PetscBool  alloc,PetscInt nghost,const PetscScalar array[])
{
  Vec_MPI        *s;
  PetscErrorCode ierr;

  PetscFunctionBegin;

  ierr           = PetscNewLog(v,Vec_MPI,&s);CHKERRQ(ierr);
  v->data        = (void*)s;
  ierr           = PetscMemcpy(v->ops,&DvOps,sizeof(DvOps));CHKERRQ(ierr);
  s->nghost      = nghost;
  v->petscnative = PETSC_TRUE;

  ierr = PetscLayoutSetUp(v->map);CHKERRQ(ierr);
  s->array           = (PetscScalar *)array;
  s->array_allocated = 0;
  if (alloc && !array) {
    PetscInt n         = v->map->n+nghost;
    ierr               = PetscMalloc(n*sizeof(PetscScalar),&s->array);CHKERRQ(ierr);
    ierr               = PetscLogObjectMemory(v,n*sizeof(PetscScalar));CHKERRQ(ierr);
    ierr               = PetscMemzero(s->array,v->map->n*sizeof(PetscScalar));CHKERRQ(ierr);
    s->array_allocated = s->array;
  }

  /* By default parallel vectors do not have local representation */
  s->localrep    = 0;
  s->localupdate = 0;

  v->stash.insertmode  = NOT_SET_VALUES;
  /* create the stashes. The block-size for bstash is set later when
     VecSetValuesBlocked is called.
  */
  ierr = VecStashCreate_Private(((PetscObject)v)->comm,1,&v->stash);CHKERRQ(ierr);
  ierr = VecStashCreate_Private(((PetscObject)v)->comm,v->map->bs,&v->bstash);CHKERRQ(ierr);

#if defined(PETSC_HAVE_MATLAB_ENGINE)
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEnginePut_C","VecMatlabEnginePut_Default",VecMatlabEnginePut_Default);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEngineGet_C","VecMatlabEngineGet_Default",VecMatlabEngineGet_Default);CHKERRQ(ierr);
#endif
  ierr = PetscObjectChangeTypeName((PetscObject)v,VECMPI);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*MC
   VECMPI - VECMPI = "mpi" - The basic parallel vector

   Options Database Keys:
. -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions()

  Level: beginner

.seealso: VecCreate(), VecSetType(), VecSetFromOptions(), VecCreateMPIWithArray(), VecType, VecCreateMPI()
M*/

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "VecCreate_MPI"
PetscErrorCode  VecCreate_MPI(Vec vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END
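
/*
   Illustrative usage sketch (not part of the original source): creating a VECMPI vector
   explicitly with VecSetType().  The communicator, sizes, and variable names below are
   hypothetical; error checking follows the usual PETSc ierr/CHKERRQ convention.

     Vec            x;
     PetscErrorCode ierr;

     ierr = VecCreate(PETSC_COMM_WORLD,&x);CHKERRQ(ierr);
     ierr = VecSetSizes(x,PETSC_DECIDE,100);CHKERRQ(ierr);    100 global entries, local split chosen by PETSc
     ierr = VecSetType(x,VECMPI);CHKERRQ(ierr);               roughly equivalent to VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE,100,&x)
     ierr = VecSet(x,1.0);CHKERRQ(ierr);
     ierr = VecDestroy(&x);CHKERRQ(ierr);
*/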

/*MC
   VECSTANDARD = "standard" - A VECSEQ on one process and VECMPI on more than one process

   Options Database Keys:
. -vec_type standard - sets the vector type to VECSTANDARD during a call to VecSetFromOptions()

  Level: beginner

.seealso: VecCreateSeq(), VecCreateMPI()
M*/

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "VecCreate_Standard"
PetscErrorCode  VecCreate_Standard(Vec v)
{
  PetscErrorCode ierr;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(((PetscObject)v)->comm,&size);CHKERRQ(ierr);
  if (size == 1) {
    ierr = VecSetType(v,VECSEQ);CHKERRQ(ierr);
  } else {
    ierr = VecSetType(v,VECMPI);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
EXTERN_C_END
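
/*
   Illustrative usage sketch (not part of the original source): letting VecSetFromOptions()
   pick the type, so the vector becomes VECSEQ on one process and VECMPI otherwise.  Sizes
   and names are hypothetical; the type can be selected at runtime with -vec_type standard.

     Vec            x;
     PetscErrorCode ierr;

     ierr = VecCreate(PETSC_COMM_WORLD,&x);CHKERRQ(ierr);
     ierr = VecSetSizes(x,PETSC_DECIDE,100);CHKERRQ(ierr);
     ierr = VecSetFromOptions(x);CHKERRQ(ierr);
     ierr = VecDestroy(&x);CHKERRQ(ierr);
*/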


#undef __FUNCT__
#define __FUNCT__ "VecCreateMPIWithArray"
/*@C
   VecCreateMPIWithArray - Creates a parallel, array-style vector,
   where the user provides the array space to store the vector values.

   Collective on MPI_Comm

   Input Parameters:
+  comm  - the MPI communicator to use
.  bs    - block size, same meaning as VecSetBlockSize()
.  n     - local vector length, cannot be PETSC_DECIDE
.  N     - global vector length (or PETSC_DECIDE to have it calculated)
-  array - the user provided array to store the vector values

   Output Parameter:
.  vv - the vector

   Notes:
   Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
   same type as an existing vector.

   If the user-provided array is PETSC_NULL, then VecPlaceArray() can be used
   at a later stage to SET the array for storing the vector values.

   PETSc does NOT free the array when the vector is destroyed via VecDestroy().
   The user should not free the array until the vector is destroyed.

   Level: intermediate

   Concepts: vectors^creating with array

.seealso: VecCreateSeqWithArray(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(),
          VecCreateMPI(), VecCreateGhostWithArray(), VecPlaceArray()

@*/
PetscErrorCode  VecCreateMPIWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,const PetscScalar array[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (n == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_FALSE,0,array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
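
/*
   Illustrative usage sketch (not part of the original source): wrapping a user-allocated
   buffer in a parallel vector.  The local size (5 entries per process) and names are
   hypothetical; the caller keeps ownership of the array and must not free it before VecDestroy().

     Vec            x;
     PetscScalar    buffer[5];
     PetscErrorCode ierr;

     ierr = VecCreateMPIWithArray(PETSC_COMM_WORLD,1,5,PETSC_DECIDE,buffer,&x);CHKERRQ(ierr);
     ierr = VecSet(x,0.0);CHKERRQ(ierr);                      values land directly in buffer[]
     ierr = VecDestroy(&x);CHKERRQ(ierr);                     buffer[] is NOT freed here
*/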

#undef __FUNCT__
#define __FUNCT__ "VecCreateGhostWithArray"
/*@C
   VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost points
.  ghosts - global indices of ghost points (or PETSC_NULL if not needed), these do not need to be in increasing order (sorted)
-  array - the space to store the vector values (of length at least n + nghost)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   Level: advanced

   Concepts: vectors^creating with array
   Concepts: vectors^ghosted

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray(), VecMPISetGhost()

@*/
PetscErrorCode  VecCreateGhostWithArray(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode         ierr;
  Vec_MPI                *w;
  PetscScalar            *larray;
  IS                     from,to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart,i,*indices;

  PetscFunctionBegin;
  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost,array);CHKERRQ(ierr);
  w    = (Vec_MPI *)(*vv)->data;
  /* Create local representation */
  ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localrep);CHKERRQ(ierr);
  ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  ierr = ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localupdate);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);

  /* set local to global mapping for ghosted vector */
  ierr = PetscMalloc((n+nghost)*sizeof(PetscInt),&indices);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(*vv,&rstart,PETSC_NULL);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    indices[i] = rstart + i;
  }
  for (i=0; i<nghost; i++) {
    indices[n+i] = ghosts[i];
  }
  ierr = ISLocalToGlobalMappingCreate(comm,n+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
  ierr = VecSetLocalToGlobalMapping(*vv,ltog);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
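
/*
   Illustrative usage sketch (not part of the original source): a ghosted vector backed by a
   caller-allocated buffer of length n + nghost.  The sizes, ghost indices, and names are
   hypothetical; the buffer must stay valid until the vector is destroyed.

     Vec            x;
     PetscInt       ghosts[2] = {0,1};                        hypothetical global indices owned elsewhere
     PetscScalar    buffer[4+2];                               4 local entries plus 2 ghost slots
     PetscErrorCode ierr;

     ierr = VecCreateGhostWithArray(PETSC_COMM_WORLD,4,PETSC_DECIDE,2,ghosts,buffer,&x);CHKERRQ(ierr);
     ierr = VecDestroy(&x);CHKERRQ(ierr);                     buffer[] remains owned by the caller
*/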

#undef __FUNCT__
#define __FUNCT__ "VecCreateGhost"
/*@
   VecCreateGhost - Creates a parallel vector with ghost padding on each processor.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points, these do not need to be in increasing order (sorted)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray(), VecMPISetGhost()

@*/
PetscErrorCode  VecCreateGhost(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreateGhostWithArray(comm,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
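
/*
   Illustrative usage sketch (not part of the original source): the typical ghost-update cycle
   on a vector created with VecCreateGhost().  Sizes, ghost indices, and names are hypothetical.

     Vec            x,xlocal;
     PetscInt       ghosts[2] = {0,1};                        hypothetical ghost indices
     PetscErrorCode ierr;

     ierr = VecCreateGhost(PETSC_COMM_WORLD,4,PETSC_DECIDE,2,ghosts,&x);CHKERRQ(ierr);
     ierr = VecSet(x,1.0);CHKERRQ(ierr);
     ierr = VecGhostUpdateBegin(x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);    fill ghost slots from their owners
     ierr = VecGhostUpdateEnd(x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
     ierr = VecGhostGetLocalForm(x,&xlocal);CHKERRQ(ierr);    sequential form of length n + nghost
     ierr = VecGhostRestoreLocalForm(x,&xlocal);CHKERRQ(ierr);
     ierr = VecDestroy(&x);CHKERRQ(ierr);
*/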

#undef __FUNCT__
#define __FUNCT__ "VecMPISetGhost"
/*@
   VecMPISetGhost - Sets the ghost points for an MPI ghost vector

   Collective on Vec

   Input Parameters:
+  vv - the MPI vector
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points, these do not need to be in increasing order (sorted)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   You must call this AFTER you have set the type of the vector (with VecSetType()) and the size (with VecSetSizes()).

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode  VecMPISetGhost(Vec vv,PetscInt nghost,const PetscInt ghosts[])
{
  PetscErrorCode ierr;
  PetscBool      flg;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)vv,VECMPI,&flg);CHKERRQ(ierr);
  /* if this is already a fully built VECMPI then destroy it and rebuild it with ghosting */
  if (flg) {
    PetscInt               n,N;
    Vec_MPI                *w;
    PetscScalar            *larray;
    IS                     from,to;
    ISLocalToGlobalMapping ltog;
    PetscInt               rstart,i,*indices;
    MPI_Comm               comm = ((PetscObject)vv)->comm;

    n = vv->map->n;
    N = vv->map->N;
    ierr = (*vv->ops->destroy)(vv);CHKERRQ(ierr);
    ierr = VecSetSizes(vv,n,N);CHKERRQ(ierr);
    ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,nghost,PETSC_NULL);CHKERRQ(ierr);
    w    = (Vec_MPI *)(vv)->data;
    /* Create local representation */
    ierr = VecGetArray(vv,&larray);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(vv,w->localrep);CHKERRQ(ierr);
    ierr = VecRestoreArray(vv,&larray);CHKERRQ(ierr);

    /*
     Create scatter context for scattering (updating) ghost values
     */
    ierr = ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
    ierr = VecScatterCreate(vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(vv,w->localupdate);CHKERRQ(ierr);
    ierr = ISDestroy(&to);CHKERRQ(ierr);
    ierr = ISDestroy(&from);CHKERRQ(ierr);

    /* set local to global mapping for ghosted vector */
    ierr = PetscMalloc((n+nghost)*sizeof(PetscInt),&indices);CHKERRQ(ierr);
    ierr = VecGetOwnershipRange(vv,&rstart,PETSC_NULL);CHKERRQ(ierr);
    for (i=0; i<n; i++) {
      indices[i] = rstart + i;
    }
    for (i=0; i<nghost; i++) {
      indices[n+i] = ghosts[i];
    }
    ierr = ISLocalToGlobalMappingCreate(comm,n+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
    ierr = VecSetLocalToGlobalMapping(vv,ltog);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  } else if (vv->ops->create == VecCreate_MPI) SETERRQ(((PetscObject)vv)->comm,PETSC_ERR_ARG_WRONGSTATE,"Must set local or global size before setting ghosting");
  else if (!((PetscObject)vv)->type_name) SETERRQ(((PetscObject)vv)->comm,PETSC_ERR_ARG_WRONGSTATE,"Must set type to VECMPI before ghosting");
  PetscFunctionReturn(0);
}
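
/*
   Illustrative usage sketch (not part of the original source): adding ghosting to a vector
   that has already been given its sizes and the VECMPI type, as VecMPISetGhost() requires.
   Sizes, ghost indices, and names are hypothetical.

     Vec            x;
     PetscInt       ghosts[2] = {0,1};                        hypothetical ghost indices
     PetscErrorCode ierr;

     ierr = VecCreate(PETSC_COMM_WORLD,&x);CHKERRQ(ierr);
     ierr = VecSetSizes(x,4,PETSC_DECIDE);CHKERRQ(ierr);
     ierr = VecSetType(x,VECMPI);CHKERRQ(ierr);               type and sizes must be set before ghosting
     ierr = VecMPISetGhost(x,2,ghosts);CHKERRQ(ierr);
     ierr = VecDestroy(&x);CHKERRQ(ierr);
*/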


/* ------------------------------------------------------------------------------------------*/
#undef __FUNCT__
#define __FUNCT__ "VecCreateGhostBlockWithArray"
/*@C
   VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space. Indices in the ghost region are based on blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - block size
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost blocks
.  ghosts - global indices of ghost blocks (or PETSC_NULL if not needed), counts are by block not by index, these do not need to be in increasing order (sorted)
-  array - the space to store the vector values (of length at least n + nghost*bs)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (the total local size, not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

   Level: advanced

   Concepts: vectors^creating ghosted
   Concepts: vectors^creating with array

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostWithArray(), VecCreateGhostBlock()

@*/
PetscErrorCode  VecCreateGhostBlockWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode         ierr;
  Vec_MPI                *w;
  PetscScalar            *larray;
  IS                     from,to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart,i,nb,*indices;

  PetscFunctionBegin;
  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  if (n % bs)                 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local size must be a multiple of block size");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost*bs,array);CHKERRQ(ierr);
  w    = (Vec_MPI *)(*vv)->data;
  /* Create local representation */
  ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,bs,n+bs*nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localrep);CHKERRQ(ierr);
  ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  ierr = ISCreateBlock(comm,bs,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localupdate);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);

  /* set local to global mapping for ghosted vector */
  nb = n/bs;
  ierr = PetscMalloc((nb+nghost)*sizeof(PetscInt),&indices);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(*vv,&rstart,PETSC_NULL);CHKERRQ(ierr);
  for (i=0; i<nb; i++) {
    indices[i] = rstart + i*bs;
  }
  for (i=0; i<nghost; i++) {
    indices[nb+i] = ghosts[i];
  }
  ierr = ISLocalToGlobalMappingCreate(comm,nb+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
  ierr = VecSetLocalToGlobalMappingBlock(*vv,ltog);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecCreateGhostBlock"
/*@
   VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
        The indexing of the ghost points is done with blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - the block size
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost blocks
-  ghosts - global indices of ghost blocks, counts are by block, not by individual index, these do not need to be in increasing order (sorted)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (the total local size, not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode  VecCreateGhostBlock(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
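
/*
   Illustrative usage sketch (not part of the original source): a block-ghosted vector with
   block size 2, where the ghost indices count blocks rather than scalar entries.  Sizes,
   ghost block indices, and names are hypothetical.

     Vec            x;
     PetscInt       ghostblocks[1] = {0};                     hypothetical ghost BLOCK index
     PetscErrorCode ierr;

     4 local scalar entries = 2 local blocks of size bs = 2; one ghost block adds bs entries
     ierr = VecCreateGhostBlock(PETSC_COMM_WORLD,2,4,PETSC_DECIDE,1,ghostblocks,&x);CHKERRQ(ierr);
     ierr = VecGhostUpdateBegin(x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
     ierr = VecGhostUpdateEnd(x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
     ierr = VecDestroy(&x);CHKERRQ(ierr);
*/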