xref: /petsc/src/vec/vec/impls/mpi/pbvec.c (revision 7a0f38ec03370e13e8767947d08cb284a59a3e0a)
1 
2 /*
3    This file contains routines for Parallel vector operations.
4  */
5 #include <../src/vec/vec/impls/mpi/pvecimpl.h>   /*I  "petscvec.h"   I*/
6 
7 #undef __FUNCT__
8 #define __FUNCT__ "VecDot_MPI"
9 static PetscErrorCode VecDot_MPI(Vec xin,Vec yin,PetscScalar *z)
10 {
11   PetscScalar    sum,work;
12   PetscErrorCode ierr;
13 
14   PetscFunctionBegin;
15   ierr = VecDot_Seq(xin,yin,&work);CHKERRQ(ierr);
16   ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)xin));CHKERRQ(ierr);
17   *z   = sum;
18   PetscFunctionReturn(0);
19 }
20 
21 #undef __FUNCT__
22 #define __FUNCT__ "VecTDot_MPI"
23 static PetscErrorCode VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z)
24 {
25   PetscScalar    sum,work;
26   PetscErrorCode ierr;
27 
28   PetscFunctionBegin;
29   ierr = VecTDot_Seq(xin,yin,&work);CHKERRQ(ierr);
30   ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)xin));CHKERRQ(ierr);
31   *z   = sum;
32   PetscFunctionReturn(0);
33 }
34 
35 extern PetscErrorCode VecView_MPI_Draw(Vec,PetscViewer);
36 
37 #undef __FUNCT__
38 #define __FUNCT__ "VecPlaceArray_MPI"
39 static PetscErrorCode VecPlaceArray_MPI(Vec vin,const PetscScalar *a)
40 {
41   PetscErrorCode ierr;
42   Vec_MPI        *v = (Vec_MPI*)vin->data;
43 
44   PetscFunctionBegin;
45   if (v->unplacedarray) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
46   v->unplacedarray = v->array;  /* save previous array so reset can bring it back */
47   v->array         = (PetscScalar*)a;
48   if (v->localrep) {
49     ierr = VecPlaceArray(v->localrep,a);CHKERRQ(ierr);
50   }
51   PetscFunctionReturn(0);
52 }
53 
54 extern PetscErrorCode VecGetValues_MPI(Vec,PetscInt,const PetscInt[],PetscScalar[]);
55 
56 #undef __FUNCT__
57 #define __FUNCT__ "VecDuplicate_MPI"
/* Duplicate a parallel vector: same layout, ops table, ghosting, and composed
   data as win; values are NOT copied. */
static PetscErrorCode VecDuplicate_MPI(Vec win,Vec *v)
{
  PetscErrorCode ierr;
  Vec_MPI        *vw,*w = (Vec_MPI*)win->data;
  PetscScalar    *array;

  PetscFunctionBegin;
  /* create an empty vector on win's communicator and share its layout by reference
     instead of recomputing ownership ranges */
  ierr = VecCreate(PetscObjectComm((PetscObject)win),v);CHKERRQ(ierr);
  ierr = PetscLayoutReference(win->map,&(*v)->map);CHKERRQ(ierr);

  /* allocate storage including room for the same number of ghost points as win */
  ierr = VecCreate_MPI_Private(*v,PETSC_TRUE,w->nghost,0);CHKERRQ(ierr);
  vw   = (Vec_MPI*)(*v)->data;
  /* copy win's full ops table (not DvOps) so any overridden operations carry over */
  ierr = PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps));CHKERRQ(ierr);

  /* save local representation of the parallel vector (and scatter) if it exists */
  if (w->localrep) {
    ierr = VecGetArray(*v,&array);CHKERRQ(ierr);
    /* local form is a sequential vector of length n+nghost sharing the new storage */
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,PetscAbs(win->map->bs),win->map->n+w->nghost,array,&vw->localrep);CHKERRQ(ierr);
    ierr = PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps));CHKERRQ(ierr);
    ierr = VecRestoreArray(*v,&array);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)*v,(PetscObject)vw->localrep);CHKERRQ(ierr);

    /* the ghost-update scatter is shared with win (reference counted), not rebuilt */
    vw->localupdate = w->localupdate;
    if (vw->localupdate) {
      ierr = PetscObjectReference((PetscObject)vw->localupdate);CHKERRQ(ierr);
    }
  }

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash   = win->stash.donotstash;
  (*v)->stash.ignorenegidx = win->stash.ignorenegidx;

  /* carry over objects and functions composed on win via PetscObjectCompose()/ComposeFunction() */
  ierr = PetscObjectListDuplicate(((PetscObject)win)->olist,&((PetscObject)(*v))->olist);CHKERRQ(ierr);
  ierr = PetscFunctionListDuplicate(((PetscObject)win)->qlist,&((PetscObject)(*v))->qlist);CHKERRQ(ierr);

  (*v)->map->bs   = PetscAbs(win->map->bs);
  (*v)->bstash.bs = win->bstash.bs;
  PetscFunctionReturn(0);
}
97 
98 extern PetscErrorCode VecSetOption_MPI(Vec,VecOption,PetscBool);
99 extern PetscErrorCode VecResetArray_MPI(Vec);
100 
/* Operations table for the MPI vector type.  Entries are positional and must
   match the field order of struct _VecOps: reductions and assembly use the
   _MPI variants, purely local operations reuse the _Seq kernels, and a 0 entry
   means the operation is not provided here (handled by generic dispatch).
   The numbered comments mark every 10th entry to ease counting. */
static struct _VecOps DvOps = { VecDuplicate_MPI, /* 1 */
                                VecDuplicateVecs_Default,
                                VecDestroyVecs_Default,
                                VecDot_MPI,
                                VecMDot_MPI,
                                VecNorm_MPI,
                                VecTDot_MPI,
                                VecMTDot_MPI,
                                VecScale_Seq,
                                VecCopy_Seq, /* 10 */
                                VecSet_Seq,
                                VecSwap_Seq,
                                VecAXPY_Seq,
                                VecAXPBY_Seq,
                                VecMAXPY_Seq,
                                VecAYPX_Seq,
                                VecWAXPY_Seq,
                                VecAXPBYPCZ_Seq,
                                VecPointwiseMult_Seq,
                                VecPointwiseDivide_Seq, /* 20 */
                                VecSetValues_MPI,
                                VecAssemblyBegin_MPI,
                                VecAssemblyEnd_MPI,
                                0,
                                VecGetSize_MPI,
                                VecGetSize_Seq,
                                0,
                                VecMax_MPI,
                                VecMin_MPI,
                                VecSetRandom_Seq, /* 30 */
                                VecSetOption_MPI,
                                VecSetValuesBlocked_MPI,
                                VecDestroy_MPI,
                                VecView_MPI,
                                VecPlaceArray_MPI,
                                VecReplaceArray_Seq,
                                VecDot_Seq,
                                VecTDot_Seq,
                                VecNorm_Seq,
                                VecMDot_Seq, /* 40 */
                                VecMTDot_Seq,
                                VecLoad_Default,
                                VecReciprocal_Default,
                                VecConjugate_Seq,
                                0,
                                0,
                                VecResetArray_MPI,
                                0,
                                VecMaxPointwiseDivide_Seq,
                                VecPointwiseMax_Seq, /* 50 */
                                VecPointwiseMaxAbs_Seq,
                                VecPointwiseMin_Seq,
                                VecGetValues_MPI,
                                0,
                                0,
                                0,
                                0,
                                0,
                                0,
                                VecStrideGather_Default, /* 60 */
                                VecStrideScatter_Default,
                                0,
                                0,
                                0,
                                0,
                                0
};
168 
169 #undef __FUNCT__
170 #define __FUNCT__ "VecCreate_MPI_Private"
171 /*
172     VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
173     VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
174     VecDuplicate_MPI(), VecCreateGhostWithArray(), VecDuplicate_MPI(), and VecDuplicate_Shared()
175 
176     If alloc is true and array is NULL then this routine allocates the space, otherwise
177     no space is allocated.
178 */
179 PetscErrorCode VecCreate_MPI_Private(Vec v,PetscBool alloc,PetscInt nghost,const PetscScalar array[])
180 {
181   Vec_MPI        *s;
182   PetscErrorCode ierr;
183 
184   PetscFunctionBegin;
185   ierr           = PetscNewLog(v,&s);CHKERRQ(ierr);
186   v->data        = (void*)s;
187   ierr           = PetscMemcpy(v->ops,&DvOps,sizeof(DvOps));CHKERRQ(ierr);
188   s->nghost      = nghost;
189   v->petscnative = PETSC_TRUE;
190 
191   ierr = PetscLayoutSetUp(v->map);CHKERRQ(ierr);
192 
193   s->array           = (PetscScalar*)array;
194   s->array_allocated = 0;
195   if (alloc && !array) {
196     PetscInt n = v->map->n+nghost;
197     ierr               = PetscMalloc1(n,&s->array);CHKERRQ(ierr);
198     ierr               = PetscLogObjectMemory((PetscObject)v,n*sizeof(PetscScalar));CHKERRQ(ierr);
199     ierr               = PetscMemzero(s->array,v->map->n*sizeof(PetscScalar));CHKERRQ(ierr);
200     s->array_allocated = s->array;
201   }
202 
203   /* By default parallel vectors do not have local representation */
204   s->localrep    = 0;
205   s->localupdate = 0;
206 
207   v->stash.insertmode = NOT_SET_VALUES;
208   /* create the stashes. The block-size for bstash is set later when
209      VecSetValuesBlocked is called.
210   */
211   ierr = VecStashCreate_Private(PetscObjectComm((PetscObject)v),1,&v->stash);CHKERRQ(ierr);
212   ierr = VecStashCreate_Private(PetscObjectComm((PetscObject)v),PetscAbs(v->map->bs),&v->bstash);CHKERRQ(ierr);
213 
214 #if defined(PETSC_HAVE_MATLAB_ENGINE)
215   ierr = PetscObjectComposeFunction((PetscObject)v,"PetscMatlabEnginePut_C",VecMatlabEnginePut_Default);CHKERRQ(ierr);
216   ierr = PetscObjectComposeFunction((PetscObject)v,"PetscMatlabEngineGet_C",VecMatlabEngineGet_Default);CHKERRQ(ierr);
217 #endif
218   ierr = PetscObjectChangeTypeName((PetscObject)v,VECMPI);CHKERRQ(ierr);
219   PetscFunctionReturn(0);
220 }
221 
222 /*MC
223    VECMPI - VECMPI = "mpi" - The basic parallel vector
224 
225    Options Database Keys:
226 . -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions()
227 
228   Level: beginner
229 
.seealso: VecCreate(), VecSetType(), VecSetFromOptions(), VecCreateMPIWithArray(), VECMPI, VecType, VecCreateMPI()
231 M*/
232 
233 #undef __FUNCT__
234 #define __FUNCT__ "VecCreate_MPI"
235 PETSC_EXTERN PetscErrorCode VecCreate_MPI(Vec vv)
236 {
237   PetscErrorCode ierr;
238 
239   PetscFunctionBegin;
240   ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,0,0);CHKERRQ(ierr);
241   PetscFunctionReturn(0);
242 }
243 
244 /*MC
245    VECSTANDARD = "standard" - A VECSEQ on one process and VECMPI on more than one process
246 
247    Options Database Keys:
248 . -vec_type standard - sets a vector type to standard on calls to VecSetFromOptions()
249 
250   Level: beginner
251 
252 .seealso: VecCreateSeq(), VecCreateMPI()
253 M*/
254 
255 #undef __FUNCT__
256 #define __FUNCT__ "VecCreate_Standard"
257 PETSC_EXTERN PetscErrorCode VecCreate_Standard(Vec v)
258 {
259   PetscErrorCode ierr;
260   PetscMPIInt    size;
261 
262   PetscFunctionBegin;
263   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)v),&size);CHKERRQ(ierr);
264   if (size == 1) {
265     ierr = VecSetType(v,VECSEQ);CHKERRQ(ierr);
266   } else {
267     ierr = VecSetType(v,VECMPI);CHKERRQ(ierr);
268   }
269   PetscFunctionReturn(0);
270 }
271 
272 #undef __FUNCT__
273 #define __FUNCT__ "VecCreateMPIWithArray"
274 /*@C
275    VecCreateMPIWithArray - Creates a parallel, array-style vector,
276    where the user provides the array space to store the vector values.
277 
278    Collective on MPI_Comm
279 
280    Input Parameters:
281 +  comm  - the MPI communicator to use
282 .  bs    - block size, same meaning as VecSetBlockSize()
283 .  n     - local vector length, cannot be PETSC_DECIDE
284 .  N     - global vector length (or PETSC_DECIDE to have calculated)
285 -  array - the user provided array to store the vector values
286 
287    Output Parameter:
288 .  vv - the vector
289 
290    Notes:
291    Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
292    same type as an existing vector.
293 
294    If the user-provided array is NULL, then VecPlaceArray() can be used
295    at a later stage to SET the array for storing the vector values.
296 
297    PETSc does NOT free the array when the vector is destroyed via VecDestroy().
298    The user should not free the array until the vector is destroyed.
299 
300    Level: intermediate
301 
302    Concepts: vectors^creating with array
303 
304 .seealso: VecCreateSeqWithArray(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(),
305           VecCreateMPI(), VecCreateGhostWithArray(), VecPlaceArray()
306 
307 @*/
PetscErrorCode  VecCreateMPIWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,const PetscScalar array[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* the caller provided the local storage, so the local length cannot be deduced here */
  if (n == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
  /* fill in N if it was PETSC_DECIDE (collective) */
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
  /* alloc = PETSC_FALSE: use the user's array (possibly NULL until VecPlaceArray);
     PETSc never frees it */
  ierr = VecCreate_MPI_Private(*vv,PETSC_FALSE,0,array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
321 
322 #undef __FUNCT__
323 #define __FUNCT__ "VecCreateGhostWithArray"
324 /*@C
325    VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
326    the caller allocates the array space.
327 
328    Collective on MPI_Comm
329 
330    Input Parameters:
331 +  comm - the MPI communicator to use
332 .  n - local vector length
333 .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
334 .  nghost - number of local ghost points
335 .  ghosts - global indices of ghost points (or NULL if not needed), these do not need to be in increasing order (sorted)
336 -  array - the space to store the vector values (as long as n + nghost)
337 
338    Output Parameter:
339 .  vv - the global vector representation (without ghost points as part of vector)
340 
341    Notes:
342    Use VecGhostGetLocalForm() to access the local, ghosted representation
343    of the vector.
344 
345    This also automatically sets the ISLocalToGlobalMapping() for this vector.
346 
347    Level: advanced
348 
349    Concepts: vectors^creating with array
350    Concepts: vectors^ghosted
351 
352 .seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
353           VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
354           VecCreateGhostBlock(), VecCreateGhostBlockWithArray(), VecMPISetGhost()
355 
356 @*/
PetscErrorCode  VecCreateGhostWithArray(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode         ierr;
  Vec_MPI                *w;
  PetscScalar            *larray;
  IS                     from,to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart,i,*indices;

  PetscFunctionBegin;
  *vv = 0;

  /* validate sizes before any collective calls */
  if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost,array);CHKERRQ(ierr);
  w    = (Vec_MPI*)(*vv)->data;
  /* Create local representation: a sequential vector of length n+nghost that
     shares the parallel vector's storage (owned entries first, ghosts after) */
  ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localrep);CHKERRQ(ierr);
  ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values:
       'from' holds the global indices of the ghost points, 'to' the trailing
       nghost slots of the local representation
  */
  ierr = ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localupdate);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);

  /* set local to global mapping for ghosted vector: owned entries map to
     [rstart, rstart+n), ghost entries map to the caller-provided global indices */
  ierr = PetscMalloc1((n+nghost),&indices);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(*vv,&rstart,NULL);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    indices[i] = rstart + i;
  }
  for (i=0; i<nghost; i++) {
    indices[n+i] = ghosts[i];
  }
  /* PETSC_OWN_POINTER: the mapping takes over 'indices', no free needed here */
  ierr = ISLocalToGlobalMappingCreate(comm,1,n+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
  ierr = VecSetLocalToGlobalMapping(*vv,ltog);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
408 
409 #undef __FUNCT__
410 #define __FUNCT__ "VecCreateGhost"
411 /*@
412    VecCreateGhost - Creates a parallel vector with ghost padding on each processor.
413 
414    Collective on MPI_Comm
415 
416    Input Parameters:
417 +  comm - the MPI communicator to use
418 .  n - local vector length
419 .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
420 .  nghost - number of local ghost points
421 -  ghosts - global indices of ghost points, these do not need to be in increasing order (sorted)
422 
423    Output Parameter:
424 .  vv - the global vector representation (without ghost points as part of vector)
425 
426    Notes:
427    Use VecGhostGetLocalForm() to access the local, ghosted representation
428    of the vector.
429 
430    This also automatically sets the ISLocalToGlobalMapping() for this vector.
431 
432    Level: advanced
433 
434    Concepts: vectors^ghosted
435 
436 .seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
437           VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
438           VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
439           VecCreateGhostBlock(), VecCreateGhostBlockWithArray(), VecMPISetGhost()
440 
441 @*/
442 PetscErrorCode  VecCreateGhost(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
443 {
444   PetscErrorCode ierr;
445 
446   PetscFunctionBegin;
447   ierr = VecCreateGhostWithArray(comm,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
448   PetscFunctionReturn(0);
449 }
450 
451 #undef __FUNCT__
452 #define __FUNCT__ "VecMPISetGhost"
453 /*@
454    VecMPISetGhost - Sets the ghost points for an MPI ghost vector
455 
456    Collective on Vec
457 
458    Input Parameters:
459 +  vv - the MPI vector
460 .  nghost - number of local ghost points
461 -  ghosts - global indices of ghost points, these do not need to be in increasing order (sorted)
462 
463 
464    Notes:
465    Use VecGhostGetLocalForm() to access the local, ghosted representation
466    of the vector.
467 
468    This also automatically sets the ISLocalToGlobalMapping() for this vector.
469 
470    You must call this AFTER you have set the type of the vector (with VecSetType()) and the size (with VecSetSizes()).
471 
472    Level: advanced
473 
474    Concepts: vectors^ghosted
475 
476 .seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
477           VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
478           VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
479           VecCreateGhostBlock(), VecCreateGhostBlockWithArray()
480 
481 @*/
PetscErrorCode  VecMPISetGhost(Vec vv,PetscInt nghost,const PetscInt ghosts[])
{
  PetscErrorCode ierr;
  PetscBool      flg;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)vv,VECMPI,&flg);CHKERRQ(ierr);
  /* if already fully existent VECMPI then basically destroy it and rebuild with ghosting */
  if (flg) {
    PetscInt               n,N;
    Vec_MPI                *w;
    PetscScalar            *larray;
    IS                     from,to;
    ISLocalToGlobalMapping ltog;
    PetscInt               rstart,i,*indices;
    MPI_Comm               comm;

    /* remember the sizes, destroy the old internal data, and recreate with ghost room */
    ierr = PetscObjectGetComm((PetscObject)vv,&comm);CHKERRQ(ierr);
    n    = vv->map->n;
    N    = vv->map->N;
    ierr = (*vv->ops->destroy)(vv);CHKERRQ(ierr);
    ierr = VecSetSizes(vv,n,N);CHKERRQ(ierr);
    ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,nghost,NULL);CHKERRQ(ierr);
    w    = (Vec_MPI*)(vv)->data;
    /* Create local representation: sequential vector of length n+nghost sharing the storage */
    ierr = VecGetArray(vv,&larray);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)vv,(PetscObject)w->localrep);CHKERRQ(ierr);
    ierr = VecRestoreArray(vv,&larray);CHKERRQ(ierr);

    /*
     Create scatter context for scattering (updating) ghost values:
     global ghost indices -> trailing nghost slots of the local form
     */
    ierr = ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
    ierr = VecScatterCreate(vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)vv,(PetscObject)w->localupdate);CHKERRQ(ierr);
    ierr = ISDestroy(&to);CHKERRQ(ierr);
    ierr = ISDestroy(&from);CHKERRQ(ierr);

    /* set local to global mapping for ghosted vector: owned then ghost indices */
    ierr = PetscMalloc1((n+nghost),&indices);CHKERRQ(ierr);
    ierr = VecGetOwnershipRange(vv,&rstart,NULL);CHKERRQ(ierr);

    for (i=0; i<n; i++)      indices[i]   = rstart + i;
    for (i=0; i<nghost; i++) indices[n+i] = ghosts[i];

    /* PETSC_OWN_POINTER: the mapping takes ownership of 'indices' */
    ierr = ISLocalToGlobalMappingCreate(comm,1,n+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
    ierr = VecSetLocalToGlobalMapping(vv,ltog);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  } else if (vv->ops->create == VecCreate_MPI) SETERRQ(PetscObjectComm((PetscObject)vv),PETSC_ERR_ARG_WRONGSTATE,"Must set local or global size before setting ghosting");
  else if (!((PetscObject)vv)->type_name) SETERRQ(PetscObjectComm((PetscObject)vv),PETSC_ERR_ARG_WRONGSTATE,"Must set type to VECMPI before ghosting");
  PetscFunctionReturn(0);
}
536 
537 
538 /* ------------------------------------------------------------------------------------------*/
539 #undef __FUNCT__
540 #define __FUNCT__ "VecCreateGhostBlockWithArray"
541 /*@C
542    VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
543    the caller allocates the array space. Indices in the ghost region are based on blocks.
544 
545    Collective on MPI_Comm
546 
547    Input Parameters:
548 +  comm - the MPI communicator to use
549 .  bs - block size
550 .  n - local vector length
551 .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
552 .  nghost - number of local ghost blocks
553 .  ghosts - global indices of ghost blocks (or NULL if not needed), counts are by block not by index, these do not need to be in increasing order (sorted)
554 -  array - the space to store the vector values (as long as n + nghost*bs)
555 
556    Output Parameter:
557 .  vv - the global vector representation (without ghost points as part of vector)
558 
559    Notes:
560    Use VecGhostGetLocalForm() to access the local, ghosted representation
561    of the vector.
562 
563    n is the local vector size (total local size not the number of blocks) while nghost
564    is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
565    portion is bs*nghost
566 
567    Level: advanced
568 
569    Concepts: vectors^creating ghosted
570    Concepts: vectors^creating with array
571 
572 .seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
573           VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
574           VecCreateGhostWithArray(), VecCreateGhostBlock()
575 
576 @*/
PetscErrorCode  VecCreateGhostBlockWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode         ierr;
  Vec_MPI                *w;
  PetscScalar            *larray;
  IS                     from,to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart,i,nb,*indices;

  PetscFunctionBegin;
  *vv = 0;

  /* validate sizes before any collective calls; n counts scalar entries, nghost counts blocks */
  if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  if (n % bs)                 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local size must be a multiple of block size");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
  /* storage holds the n owned entries plus nghost*bs ghost entries */
  ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost*bs,array);CHKERRQ(ierr);
  w    = (Vec_MPI*)(*vv)->data;
  /* Create local representation sharing the storage (owned entries first, ghosts after) */
  ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,bs,n+bs*nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localrep);CHKERRQ(ierr);
  ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values:
       'from' is a block IS over the global ghost-block indices, 'to' the
       trailing bs*nghost scalar slots of the local representation
  */
  ierr = ISCreateBlock(comm,bs,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localupdate);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);

  /* set local to global mapping for ghosted vector; the mapping is block-based
     (bs passed to ISLocalToGlobalMappingCreate), so indices count blocks */
  nb   = n/bs;
  ierr = PetscMalloc1((nb+nghost),&indices);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(*vv,&rstart,NULL);CHKERRQ(ierr);

  /* NOTE(review): owned block indices are written as rstart + i with rstart in
     scalar units — presumably correct only when combined with the block mapping;
     verify against ISLocalToGlobalMappingCreate block semantics */
  for (i=0; i<nb; i++)      indices[i]    = rstart + i;
  for (i=0; i<nghost; i++)  indices[nb+i] = ghosts[i];

  ierr = ISLocalToGlobalMappingCreate(comm,bs,nb+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
  ierr = VecSetLocalToGlobalMapping(*vv,ltog);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
629 
630 #undef __FUNCT__
631 #define __FUNCT__ "VecCreateGhostBlock"
632 /*@
633    VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
        The indexing of the ghost points is done with blocks.
635 
636    Collective on MPI_Comm
637 
638    Input Parameters:
639 +  comm - the MPI communicator to use
640 .  bs - the block size
641 .  n - local vector length
642 .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
643 .  nghost - number of local ghost blocks
644 -  ghosts - global indices of ghost blocks, counts are by block, not by individual index, these do not need to be in increasing order (sorted)
645 
646    Output Parameter:
647 .  vv - the global vector representation (without ghost points as part of vector)
648 
649    Notes:
650    Use VecGhostGetLocalForm() to access the local, ghosted representation
651    of the vector.
652 
653    n is the local vector size (total local size not the number of blocks) while nghost
654    is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
655    portion is bs*nghost
656 
657    Level: advanced
658 
659    Concepts: vectors^ghosted
660 
661 .seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
662           VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
663           VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecCreateGhostBlockWithArray()
664 
665 @*/
666 PetscErrorCode  VecCreateGhostBlock(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
667 {
668   PetscErrorCode ierr;
669 
670   PetscFunctionBegin;
671   ierr = VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
672   PetscFunctionReturn(0);
673 }
674