
/*
   This file contains routines for Parallel vector operations.
 */
#include <../src/vec/vec/impls/mpi/pvecimpl.h>   /*I  "petscvec.h"   I*/

#undef __FUNCT__
#define __FUNCT__ "VecDot_MPI"
PetscErrorCode VecDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecDot_Seq(xin,yin,&work);CHKERRQ(ierr);
  ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,((PetscObject)xin)->comm);CHKERRQ(ierr);
  *z   = sum;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecTDot_MPI"
PetscErrorCode VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecTDot_Seq(xin,yin,&work);CHKERRQ(ierr);
  ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,((PetscObject)xin)->comm);CHKERRQ(ierr);
  *z   = sum;
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
extern PetscErrorCode VecView_MPI_Draw(Vec,PetscViewer);
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "VecPlaceArray_MPI"
PetscErrorCode VecPlaceArray_MPI(Vec vin,const PetscScalar *a)
{
  PetscErrorCode ierr;
  Vec_MPI        *v = (Vec_MPI*)vin->data;

  PetscFunctionBegin;
  if (v->unplacedarray) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
  v->unplacedarray = v->array;  /* save previous array so reset can bring it back */
  v->array         = (PetscScalar*)a;
  if (v->localrep) {
    ierr = VecPlaceArray(v->localrep,a);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

extern PetscErrorCode VecGetValues_MPI(Vec,PetscInt,const PetscInt [],PetscScalar []);

#undef __FUNCT__
#define __FUNCT__ "VecDuplicate_MPI"
static PetscErrorCode VecDuplicate_MPI(Vec win,Vec *v)
{
  PetscErrorCode ierr;
  Vec_MPI        *vw,*w = (Vec_MPI*)win->data;
  PetscScalar    *array;

  PetscFunctionBegin;
  ierr = VecCreate(((PetscObject)win)->comm,v);CHKERRQ(ierr);
  ierr = PetscLayoutReference(win->map,&(*v)->map);CHKERRQ(ierr);

  ierr = VecCreate_MPI_Private(*v,PETSC_TRUE,w->nghost,0);CHKERRQ(ierr);
  vw   = (Vec_MPI*)(*v)->data;
  ierr = PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps));CHKERRQ(ierr);

  /* save local representation of the parallel vector (and scatter) if it exists */
  if (w->localrep) {
    ierr = VecGetArray(*v,&array);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,win->map->bs,win->map->n+w->nghost,array,&vw->localrep);CHKERRQ(ierr);
    ierr = PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps));CHKERRQ(ierr);
    ierr = VecRestoreArray(*v,&array);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(*v,vw->localrep);CHKERRQ(ierr);
    vw->localupdate = w->localupdate;
    if (vw->localupdate) {
      ierr = PetscObjectReference((PetscObject)vw->localupdate);CHKERRQ(ierr);
    }
  }

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash = win->stash.donotstash;
  (*v)->stash.ignorenegidx = win->stash.ignorenegidx;

  ierr = PetscObjectListDuplicate(((PetscObject)win)->olist,&((PetscObject)(*v))->olist);CHKERRQ(ierr);
  ierr = PetscFunctionListDuplicate(((PetscObject)win)->qlist,&((PetscObject)(*v))->qlist);CHKERRQ(ierr);
  (*v)->map->bs   = win->map->bs;
  (*v)->bstash.bs = win->bstash.bs;
  PetscFunctionReturn(0);
}

extern PetscErrorCode VecSetOption_MPI(Vec,VecOption,PetscBool);
extern PetscErrorCode VecResetArray_MPI(Vec);

static struct _VecOps DvOps = { VecDuplicate_MPI, /* 1 */
                                VecDuplicateVecs_Default,
                                VecDestroyVecs_Default,
                                VecDot_MPI,
                                VecMDot_MPI,
                                VecNorm_MPI,
                                VecTDot_MPI,
                                VecMTDot_MPI,
                                VecScale_Seq,
                                VecCopy_Seq, /* 10 */
                                VecSet_Seq,
                                VecSwap_Seq,
                                VecAXPY_Seq,
                                VecAXPBY_Seq,
                                VecMAXPY_Seq,
                                VecAYPX_Seq,
                                VecWAXPY_Seq,
                                VecAXPBYPCZ_Seq,
                                VecPointwiseMult_Seq,
                                VecPointwiseDivide_Seq,
                                VecSetValues_MPI, /* 20 */
                                VecAssemblyBegin_MPI,
                                VecAssemblyEnd_MPI,
                                0,
                                VecGetSize_MPI,
                                VecGetSize_Seq,
                                0,
                                VecMax_MPI,
                                VecMin_MPI,
                                VecSetRandom_Seq,
                                VecSetOption_MPI,
                                VecSetValuesBlocked_MPI,
                                VecDestroy_MPI,
                                VecView_MPI,
                                VecPlaceArray_MPI,
                                VecReplaceArray_Seq,
                                VecDot_Seq,
                                VecTDot_Seq,
                                VecNorm_Seq,
                                VecMDot_Seq,
                                VecMTDot_Seq,
                                VecLoad_Default,
                                VecReciprocal_Default,
                                VecConjugate_Seq,
                                0,
                                0,
                                VecResetArray_MPI,
                                0,
                                VecMaxPointwiseDivide_Seq,
                                VecPointwiseMax_Seq,
                                VecPointwiseMaxAbs_Seq,
                                VecPointwiseMin_Seq,
                                VecGetValues_MPI,
                                0,
                                0,
                                0,
                                0,
                                0,
                                0,
                                VecStrideGather_Default,
                                VecStrideScatter_Default};

#undef __FUNCT__
#define __FUNCT__ "VecCreate_MPI_Private"
/*
    VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
    VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
    VecCreateGhostWithArray(), VecDuplicate_MPI(), and VecDuplicate_Shared()

    If alloc is true and array is PETSC_NULL then this routine allocates the space, otherwise
    no space is allocated.
*/
PetscErrorCode VecCreate_MPI_Private(Vec v,PetscBool alloc,PetscInt nghost,const PetscScalar array[])
{
  Vec_MPI        *s;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr           = PetscNewLog(v,Vec_MPI,&s);CHKERRQ(ierr);
  v->data        = (void*)s;
  ierr           = PetscMemcpy(v->ops,&DvOps,sizeof(DvOps));CHKERRQ(ierr);
  s->nghost      = nghost;
  v->petscnative = PETSC_TRUE;

  ierr = PetscLayoutSetUp(v->map);CHKERRQ(ierr);
  s->array           = (PetscScalar*)array;
  s->array_allocated = 0;
  if (alloc && !array) {
    PetscInt n = v->map->n+nghost;
    ierr               = PetscMalloc(n*sizeof(PetscScalar),&s->array);CHKERRQ(ierr);
    ierr               = PetscLogObjectMemory(v,n*sizeof(PetscScalar));CHKERRQ(ierr);
    ierr               = PetscMemzero(s->array,v->map->n*sizeof(PetscScalar));CHKERRQ(ierr);
    s->array_allocated = s->array;
  }

  /* By default parallel vectors do not have local representation */
  s->localrep    = 0;
  s->localupdate = 0;

  v->stash.insertmode = NOT_SET_VALUES;
  /* create the stashes. The block-size for bstash is set later when
     VecSetValuesBlocked is called.
  */
  ierr = VecStashCreate_Private(((PetscObject)v)->comm,1,&v->stash);CHKERRQ(ierr);
  ierr = VecStashCreate_Private(((PetscObject)v)->comm,v->map->bs,&v->bstash);CHKERRQ(ierr);

#if defined(PETSC_HAVE_MATLAB_ENGINE)
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEnginePut_C","VecMatlabEnginePut_Default",VecMatlabEnginePut_Default);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEngineGet_C","VecMatlabEngineGet_Default",VecMatlabEngineGet_Default);CHKERRQ(ierr);
#endif
  ierr = PetscObjectChangeTypeName((PetscObject)v,VECMPI);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*MC
   VECMPI - VECMPI = "mpi" - The basic parallel vector

   Options Database Keys:
. -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions()

  Level: beginner

.seealso: VecCreate(), VecSetType(), VecSetFromOptions(), VecCreateMPIWithArray(), VecType, VecCreateMPI()
M*/
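
/*
   Example (an illustrative sketch, not part of the library source): creating a
   VECMPI explicitly and filling it with VecSetValues(). Error checking is
   omitted for brevity; the vector name "x" and the sizes used are arbitrary.

      Vec         x;
      PetscInt    i,istart,iend;
      PetscScalar val;

      VecCreate(PETSC_COMM_WORLD,&x);
      VecSetSizes(x,PETSC_DECIDE,100);      // let PETSc choose the local sizes
      VecSetType(x,VECMPI);                 // or VecSetFromOptions(x)
      VecGetOwnershipRange(x,&istart,&iend);
      for (i=istart; i<iend; i++) {
        val = (PetscScalar)i;
        VecSetValues(x,1,&i,&val,INSERT_VALUES);
      }
      VecAssemblyBegin(x);
      VecAssemblyEnd(x);
      VecDestroy(&x);
*/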

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "VecCreate_MPI"
PetscErrorCode  VecCreate_MPI(Vec vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

/*MC
   VECSTANDARD = "standard" - A VECSEQ on one process and VECMPI on more than one process

   Options Database Keys:
. -vec_type standard - sets the vector type to VECSTANDARD during a call to VecSetFromOptions()

  Level: beginner

.seealso: VecCreateSeq(), VecCreateMPI()
M*/

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "VecCreate_Standard"
PetscErrorCode  VecCreate_Standard(Vec v)
{
  PetscErrorCode ierr;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(((PetscObject)v)->comm,&size);CHKERRQ(ierr);
  if (size == 1) {
    ierr = VecSetType(v,VECSEQ);CHKERRQ(ierr);
  } else {
    ierr = VecSetType(v,VECMPI);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
EXTERN_C_END
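
/*
   Example (illustrative sketch): letting the options database choose the type,
   so -vec_type standard, seq, or mpi can be selected at run time. Error
   checking omitted; the names used are arbitrary.

      Vec x;

      VecCreate(PETSC_COMM_WORLD,&x);
      VecSetSizes(x,PETSC_DECIDE,100);
      VecSetFromOptions(x);                 // honors -vec_type <standard|seq|mpi>
      VecSet(x,1.0);
      VecDestroy(&x);
*/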


#undef __FUNCT__
#define __FUNCT__ "VecCreateMPIWithArray"
/*@C
   VecCreateMPIWithArray - Creates a parallel, array-style vector,
   where the user provides the array space to store the vector values.

   Collective on MPI_Comm

   Input Parameters:
+  comm  - the MPI communicator to use
.  bs    - block size, same meaning as VecSetBlockSize()
.  n     - local vector length, cannot be PETSC_DECIDE
.  N     - global vector length (or PETSC_DECIDE to have it calculated)
-  array - the user provided array to store the vector values

   Output Parameter:
.  vv - the vector

   Notes:
   Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
   same type as an existing vector.

   If the user-provided array is PETSC_NULL, then VecPlaceArray() can be used
   at a later stage to SET the array for storing the vector values.

   PETSc does NOT free the array when the vector is destroyed via VecDestroy().
   The user should not free the array until the vector is destroyed.

   Level: intermediate

   Concepts: vectors^creating with array

.seealso: VecCreateSeqWithArray(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(),
          VecCreateMPI(), VecCreateGhostWithArray(), VecPlaceArray()

@*/
PetscErrorCode  VecCreateMPIWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,const PetscScalar array[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (n == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_FALSE,0,array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
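
/*
   Example (illustrative sketch): wrapping user-managed storage in a parallel
   vector. The array must remain valid for the life of the vector and is not
   freed by VecDestroy(). Error checking omitted; the names and sizes are
   arbitrary.

      PetscInt    nlocal = 100;
      PetscScalar *storage;
      Vec         x;

      PetscMalloc(nlocal*sizeof(PetscScalar),&storage);
      VecCreateMPIWithArray(PETSC_COMM_WORLD,1,nlocal,PETSC_DECIDE,storage,&x);
      VecSet(x,0.0);
      // ... use x ...
      VecDestroy(&x);
      PetscFree(storage);                   // the caller owns the array
*/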

#undef __FUNCT__
#define __FUNCT__ "VecCreateGhostWithArray"
/*@C
   VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost points
.  ghosts - global indices of ghost points (or PETSC_NULL if not needed), these do not need to be in increasing (sorted) order
-  array - the space to store the vector values (must be of length at least n + nghost)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   Level: advanced

   Concepts: vectors^creating with array
   Concepts: vectors^ghosted

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray(), VecMPISetGhost()

@*/
PetscErrorCode  VecCreateGhostWithArray(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode         ierr;
  Vec_MPI                *w;
  PetscScalar            *larray;
  IS                     from,to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart,i,*indices;

  PetscFunctionBegin;
  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost,array);CHKERRQ(ierr);
  w    = (Vec_MPI*)(*vv)->data;
  /* Create local representation */
  ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localrep);CHKERRQ(ierr);
  ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  ierr = ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localupdate);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);

  /* set local to global mapping for ghosted vector */
  ierr = PetscMalloc((n+nghost)*sizeof(PetscInt),&indices);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(*vv,&rstart,PETSC_NULL);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    indices[i] = rstart + i;
  }
  for (i=0; i<nghost; i++) {
    indices[n+i] = ghosts[i];
  }
  ierr = ISLocalToGlobalMappingCreate(comm,n+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
  ierr = VecSetLocalToGlobalMapping(*vv,ltog);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
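
/*
   Example (illustrative sketch): the caller provides storage of length
   n + nghost; the first n entries hold the owned values and the trailing
   nghost entries receive the ghost values. Error checking omitted; the sizes,
   ghost indices, and variable names are arbitrary.

      PetscInt    n = 4,nghost = 2;
      PetscInt    ghosts[2];                // global indices owned by other processes
      PetscScalar work[6];                  // n + nghost entries
      Vec         x;

      VecCreateGhostWithArray(PETSC_COMM_WORLD,n,PETSC_DECIDE,nghost,ghosts,work,&x);
      // ... assemble, then VecGhostUpdateBegin/End() fills work[n] .. work[n+nghost-1] ...
      VecDestroy(&x);
*/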

#undef __FUNCT__
#define __FUNCT__ "VecCreateGhost"
/*@
   VecCreateGhost - Creates a parallel vector with ghost padding on each processor.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points, these do not need to be in increasing (sorted) order

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray(), VecMPISetGhost()

@*/
PetscErrorCode  VecCreateGhost(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreateGhostWithArray(comm,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
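
/*
   Example (illustrative sketch): a typical ghosted-vector life cycle. After
   assembling the global vector, VecGhostUpdateBegin/End() scatter the owned
   values into the ghost slots, and VecGhostGetLocalForm() exposes a sequential
   view of length n + nghost. Error checking omitted; sizes, ghost indices,
   and variable names are arbitrary.

      Vec      x,xlocal;
      PetscInt nghost = 2;
      PetscInt ghosts[2];                   // global indices owned by other processes

      VecCreateGhost(PETSC_COMM_WORLD,10,PETSC_DECIDE,nghost,ghosts,&x);
      // ... VecSetValues()/VecAssemblyBegin()/VecAssemblyEnd() on x ...
      VecGhostUpdateBegin(x,INSERT_VALUES,SCATTER_FORWARD);
      VecGhostUpdateEnd(x,INSERT_VALUES,SCATTER_FORWARD);
      VecGhostGetLocalForm(x,&xlocal);      // sequential view including ghosts
      // ... work with the local form ...
      VecGhostRestoreLocalForm(x,&xlocal);
      VecDestroy(&x);
*/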

#undef __FUNCT__
#define __FUNCT__ "VecMPISetGhost"
/*@
   VecMPISetGhost - Sets the ghost points for an MPI ghost vector

   Collective on Vec

   Input Parameters:
+  vv - the MPI vector
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points, these do not need to be in increasing (sorted) order

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   You must call this AFTER you have set the type of the vector (with VecSetType()) and the size (with VecSetSizes()).

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode  VecMPISetGhost(Vec vv,PetscInt nghost,const PetscInt ghosts[])
{
  PetscErrorCode ierr;
  PetscBool      flg;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)vv,VECMPI,&flg);CHKERRQ(ierr);
  /* if this is already a fully built VECMPI, destroy it and rebuild with ghosting */
  if (flg) {
    PetscInt               n,N;
    Vec_MPI                *w;
    PetscScalar            *larray;
    IS                     from,to;
    ISLocalToGlobalMapping ltog;
    PetscInt               rstart,i,*indices;
    MPI_Comm               comm = ((PetscObject)vv)->comm;

    n    = vv->map->n;
    N    = vv->map->N;
    ierr = (*vv->ops->destroy)(vv);CHKERRQ(ierr);
    ierr = VecSetSizes(vv,n,N);CHKERRQ(ierr);
    ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,nghost,PETSC_NULL);CHKERRQ(ierr);
    w    = (Vec_MPI*)(vv)->data;
    /* Create local representation */
    ierr = VecGetArray(vv,&larray);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(vv,w->localrep);CHKERRQ(ierr);
    ierr = VecRestoreArray(vv,&larray);CHKERRQ(ierr);

    /*
     Create scatter context for scattering (updating) ghost values
     */
    ierr = ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
    ierr = VecScatterCreate(vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(vv,w->localupdate);CHKERRQ(ierr);
    ierr = ISDestroy(&to);CHKERRQ(ierr);
    ierr = ISDestroy(&from);CHKERRQ(ierr);

    /* set local to global mapping for ghosted vector */
    ierr = PetscMalloc((n+nghost)*sizeof(PetscInt),&indices);CHKERRQ(ierr);
    ierr = VecGetOwnershipRange(vv,&rstart,PETSC_NULL);CHKERRQ(ierr);
    for (i=0; i<n; i++) {
      indices[i] = rstart + i;
    }
    for (i=0; i<nghost; i++) {
      indices[n+i] = ghosts[i];
    }
    ierr = ISLocalToGlobalMappingCreate(comm,n+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
    ierr = VecSetLocalToGlobalMapping(vv,ltog);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  } else if (vv->ops->create == VecCreate_MPI) SETERRQ(((PetscObject)vv)->comm,PETSC_ERR_ARG_WRONGSTATE,"Must set local or global size before setting ghosting");
  else if (!((PetscObject)vv)->type_name) SETERRQ(((PetscObject)vv)->comm,PETSC_ERR_ARG_WRONGSTATE,"Must set type to VECMPI before ghosting");
  PetscFunctionReturn(0);
}
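
/*
   Example (illustrative sketch): adding ghosting to a vector built step by
   step. The sizes and type must already be set when VecMPISetGhost() is
   called. Error checking omitted; sizes, ghost indices, and names are
   arbitrary.

      Vec      x;
      PetscInt nghost = 2;
      PetscInt ghosts[2];                   // global indices owned by other processes

      VecCreate(PETSC_COMM_WORLD,&x);
      VecSetSizes(x,10,PETSC_DECIDE);
      VecSetType(x,VECMPI);
      VecMPISetGhost(x,nghost,ghosts);
      // ... assemble, then VecGhostUpdateBegin/End() as for VecCreateGhost() ...
      VecDestroy(&x);
*/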


/* ------------------------------------------------------------------------------------------*/
#undef __FUNCT__
#define __FUNCT__ "VecCreateGhostBlockWithArray"
/*@C
   VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space. Indices in the ghost region are based on blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - block size
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost blocks
.  ghosts - global indices of ghost blocks (or PETSC_NULL if not needed), counts are by block not by index, these do not need to be in increasing (sorted) order
-  array - the space to store the vector values (must be of length at least n + nghost*bs)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (the total local size, not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

   Level: advanced

   Concepts: vectors^creating ghosted
   Concepts: vectors^creating with array

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostWithArray(), VecCreateGhostBlock()

@*/
PetscErrorCode  VecCreateGhostBlockWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode         ierr;
  Vec_MPI                *w;
  PetscScalar            *larray;
  IS                     from,to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart,i,nb,*indices;

  PetscFunctionBegin;
  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  if (n % bs)                 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local size must be a multiple of block size");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost*bs,array);CHKERRQ(ierr);
  w    = (Vec_MPI*)(*vv)->data;
  /* Create local representation */
  ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,bs,n+bs*nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localrep);CHKERRQ(ierr);
  ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  ierr = ISCreateBlock(comm,bs,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localupdate);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);

  /* set local to global mapping for ghosted vector */
  nb   = n/bs;
  ierr = PetscMalloc((nb+nghost)*sizeof(PetscInt),&indices);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(*vv,&rstart,PETSC_NULL);CHKERRQ(ierr);
  for (i=0; i<nb; i++) {
    indices[i] = rstart + i*bs;
  }
  for (i=0; i<nghost; i++) {
    indices[nb+i] = ghosts[i];
  }
  ierr = ISLocalToGlobalMappingCreate(comm,nb+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
  ierr = VecSetLocalToGlobalMappingBlock(*vv,ltog);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecCreateGhostBlock"
/*@
   VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
        The indexing of the ghost points is done with blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - the block size
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost blocks
-  ghosts - global indices of ghost blocks, counts are by block, not by individual index, these do not need to be in increasing (sorted) order

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (the total local size, not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode  VecCreateGhostBlock(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
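
/*
   Example (illustrative sketch): a block-ghosted vector with block size 2.
   ghosts[] lists ghost BLOCK indices, so the local form holds n + bs*nghost
   scalar entries. Error checking omitted; sizes, indices, and names are
   arbitrary.

      PetscInt bs = 2,n = 10,nghost = 3;
      PetscInt ghosts[3];                   // global block indices owned by other processes
      Vec      x;

      VecCreateGhostBlock(PETSC_COMM_WORLD,bs,n,PETSC_DECIDE,nghost,ghosts,&x);
      // ... assemble, then VecGhostUpdateBegin/End() to refresh the ghost blocks ...
      VecDestroy(&x);
*/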