
/*
   This file contains routines for parallel vector operations.
 */
#include <../src/vec/vec/impls/mpi/pvecimpl.h>   /*I  "petscvec.h"   I*/

#undef __FUNCT__
#define __FUNCT__ "VecDot_MPI"
PetscErrorCode VecDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecDot_Seq(xin,yin,&work);CHKERRQ(ierr);
  ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)xin));CHKERRQ(ierr);
  *z   = sum;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecTDot_MPI"
PetscErrorCode VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecTDot_Seq(xin,yin,&work);CHKERRQ(ierr);
  ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)xin));CHKERRQ(ierr);
  *z   = sum;
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
extern PetscErrorCode VecView_MPI_Draw(Vec,PetscViewer);
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "VecPlaceArray_MPI"
PetscErrorCode VecPlaceArray_MPI(Vec vin,const PetscScalar *a)
{
  PetscErrorCode ierr;
  Vec_MPI        *v = (Vec_MPI*)vin->data;

  PetscFunctionBegin;
  if (v->unplacedarray) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
  v->unplacedarray = v->array;  /* save previous array so reset can bring it back */
  v->array         = (PetscScalar*)a;
  if (v->localrep) {
    ierr = VecPlaceArray(v->localrep,a);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

extern PetscErrorCode VecGetValues_MPI(Vec,PetscInt,const PetscInt [],PetscScalar []);

#undef __FUNCT__
#define __FUNCT__ "VecDuplicate_MPI"
static PetscErrorCode VecDuplicate_MPI(Vec win,Vec *v)
{
  PetscErrorCode ierr;
  Vec_MPI        *vw,*w = (Vec_MPI*)win->data;
  PetscScalar    *array;

  PetscFunctionBegin;
  ierr = VecCreate(PetscObjectComm((PetscObject)win),v);CHKERRQ(ierr);
  ierr = PetscLayoutReference(win->map,&(*v)->map);CHKERRQ(ierr);

  ierr = VecCreate_MPI_Private(*v,PETSC_TRUE,w->nghost,0);CHKERRQ(ierr);
  vw   = (Vec_MPI*)(*v)->data;
  ierr = PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps));CHKERRQ(ierr);

  /* save local representation of the parallel vector (and scatter) if it exists */
  if (w->localrep) {
    ierr = VecGetArray(*v,&array);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,win->map->bs,win->map->n+w->nghost,array,&vw->localrep);CHKERRQ(ierr);
    ierr = PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps));CHKERRQ(ierr);
    ierr = VecRestoreArray(*v,&array);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(*v,vw->localrep);CHKERRQ(ierr);

    vw->localupdate = w->localupdate;
    if (vw->localupdate) {
      ierr = PetscObjectReference((PetscObject)vw->localupdate);CHKERRQ(ierr);
    }
  }

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash   = win->stash.donotstash;
  (*v)->stash.ignorenegidx = win->stash.ignorenegidx;

  ierr = PetscObjectListDuplicate(((PetscObject)win)->olist,&((PetscObject)(*v))->olist);CHKERRQ(ierr);
  ierr = PetscFunctionListDuplicate(((PetscObject)win)->qlist,&((PetscObject)(*v))->qlist);CHKERRQ(ierr);

  (*v)->map->bs   = win->map->bs;
  (*v)->bstash.bs = win->bstash.bs;
  PetscFunctionReturn(0);
}

extern PetscErrorCode VecSetOption_MPI(Vec,VecOption,PetscBool);
extern PetscErrorCode VecResetArray_MPI(Vec);

static struct _VecOps DvOps = { VecDuplicate_MPI, /* 1 */
                                VecDuplicateVecs_Default,
                                VecDestroyVecs_Default,
                                VecDot_MPI,
                                VecMDot_MPI,
                                VecNorm_MPI,
                                VecTDot_MPI,
                                VecMTDot_MPI,
                                VecScale_Seq,
                                VecCopy_Seq, /* 10 */
                                VecSet_Seq,
                                VecSwap_Seq,
                                VecAXPY_Seq,
                                VecAXPBY_Seq,
                                VecMAXPY_Seq,
                                VecAYPX_Seq,
                                VecWAXPY_Seq,
                                VecAXPBYPCZ_Seq,
                                VecPointwiseMult_Seq,
                                VecPointwiseDivide_Seq,
                                VecSetValues_MPI, /* 20 */
                                VecAssemblyBegin_MPI,
                                VecAssemblyEnd_MPI,
                                0,
                                VecGetSize_MPI,
                                VecGetSize_Seq,
                                0,
                                VecMax_MPI,
                                VecMin_MPI,
                                VecSetRandom_Seq,
                                VecSetOption_MPI,
                                VecSetValuesBlocked_MPI,
                                VecDestroy_MPI,
                                VecView_MPI,
                                VecPlaceArray_MPI,
                                VecReplaceArray_Seq,
                                VecDot_Seq,
                                VecTDot_Seq,
                                VecNorm_Seq,
                                VecMDot_Seq,
                                VecMTDot_Seq,
                                VecLoad_Default,
                                VecReciprocal_Default,
                                VecConjugate_Seq,
                                0,
                                0,
                                VecResetArray_MPI,
                                0,
                                VecMaxPointwiseDivide_Seq,
                                VecPointwiseMax_Seq,
                                VecPointwiseMaxAbs_Seq,
                                VecPointwiseMin_Seq,
                                VecGetValues_MPI,
                                0,
                                0,
                                0,
                                0,
                                0,
                                0,
                                VecStrideGather_Default,
                                VecStrideScatter_Default};

#undef __FUNCT__
#define __FUNCT__ "VecCreate_MPI_Private"
/*
    VecCreate_MPI_Private - Basic create routine, called by VecCreate_MPI() (i.e. VecCreateMPI()),
    VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
    VecCreateGhostWithArray(), VecDuplicate_MPI(), and VecDuplicate_Shared()

    If alloc is true and array is NULL then this routine allocates the space, otherwise
    no space is allocated.
*/
PetscErrorCode VecCreate_MPI_Private(Vec v,PetscBool alloc,PetscInt nghost,const PetscScalar array[])
{
  Vec_MPI        *s;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr           = PetscNewLog(v,Vec_MPI,&s);CHKERRQ(ierr);
  v->data        = (void*)s;
  ierr           = PetscMemcpy(v->ops,&DvOps,sizeof(DvOps));CHKERRQ(ierr);
  s->nghost      = nghost;
  v->petscnative = PETSC_TRUE;

  ierr = PetscLayoutSetUp(v->map);CHKERRQ(ierr);

  s->array           = (PetscScalar*)array;
  s->array_allocated = 0;
  if (alloc && !array) {
    PetscInt n = v->map->n+nghost;
    ierr               = PetscMalloc(n*sizeof(PetscScalar),&s->array);CHKERRQ(ierr);
    ierr               = PetscLogObjectMemory(v,n*sizeof(PetscScalar));CHKERRQ(ierr);
    ierr               = PetscMemzero(s->array,v->map->n*sizeof(PetscScalar));CHKERRQ(ierr);
    s->array_allocated = s->array;
  }

  /* By default parallel vectors do not have local representation */
  s->localrep    = 0;
  s->localupdate = 0;

  v->stash.insertmode = NOT_SET_VALUES;
  /* create the stashes. The block-size for bstash is set later when
     VecSetValuesBlocked is called.
  */
  ierr = VecStashCreate_Private(PetscObjectComm((PetscObject)v),1,&v->stash);CHKERRQ(ierr);
  ierr = VecStashCreate_Private(PetscObjectComm((PetscObject)v),v->map->bs,&v->bstash);CHKERRQ(ierr);

#if defined(PETSC_HAVE_MATLAB_ENGINE)
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEnginePut_C","VecMatlabEnginePut_Default",VecMatlabEnginePut_Default);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEngineGet_C","VecMatlabEngineGet_Default",VecMatlabEngineGet_Default);CHKERRQ(ierr);
#endif
  ierr = PetscObjectChangeTypeName((PetscObject)v,VECMPI);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*MC
   VECMPI - VECMPI = "mpi" - The basic parallel vector

   Options Database Keys:
. -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions()

  Level: beginner

.seealso: VecCreate(), VecSetType(), VecSetFromOptions(), VecCreateMPIWithArray(), VECSEQ, VecType, VecCreateMPI()
M*/
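
/*
   A minimal usage sketch (not part of this file's build): create a VECMPI
   explicitly with VecSetType(), or let -vec_type mpi select it through
   VecSetFromOptions(). The global size 100 is illustrative; assumes
   PetscInitialize() has been called and an error-code variable ierr is in scope.

       Vec x;
       ierr = VecCreate(PETSC_COMM_WORLD,&x);CHKERRQ(ierr);
       ierr = VecSetSizes(x,PETSC_DECIDE,100);CHKERRQ(ierr);
       ierr = VecSetType(x,VECMPI);CHKERRQ(ierr);
       ierr = VecDestroy(&x);CHKERRQ(ierr);
*/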

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "VecCreate_MPI"
PetscErrorCode  VecCreate_MPI(Vec vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

/*MC
   VECSTANDARD = "standard" - A VECSEQ on one process and a VECMPI on more than one process

   Options Database Keys:
. -vec_type standard - sets the vector type to VECSTANDARD during a call to VecSetFromOptions()

  Level: beginner

.seealso: VecCreateSeq(), VecCreateMPI()
M*/
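
/*
   A minimal sketch (not part of this file's build): with VECSTANDARD the same
   code yields a VECSEQ on one process and a VECMPI otherwise, so it is the
   usual choice for communicator-size-independent code. The size is
   illustrative; assumes PetscInitialize() has been called and ierr is in scope.

       Vec x;
       ierr = VecCreate(PETSC_COMM_WORLD,&x);CHKERRQ(ierr);
       ierr = VecSetSizes(x,PETSC_DECIDE,100);CHKERRQ(ierr);
       ierr = VecSetType(x,VECSTANDARD);CHKERRQ(ierr);
       ierr = VecDestroy(&x);CHKERRQ(ierr);
*/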

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "VecCreate_Standard"
PetscErrorCode  VecCreate_Standard(Vec v)
{
  PetscErrorCode ierr;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)v),&size);CHKERRQ(ierr);
  if (size == 1) {
    ierr = VecSetType(v,VECSEQ);CHKERRQ(ierr);
  } else {
    ierr = VecSetType(v,VECMPI);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "VecCreateMPIWithArray"
/*@C
   VecCreateMPIWithArray - Creates a parallel, array-style vector,
   where the user provides the array space to store the vector values.

   Collective on MPI_Comm

   Input Parameters:
+  comm  - the MPI communicator to use
.  bs    - block size, same meaning as VecSetBlockSize()
.  n     - local vector length, cannot be PETSC_DECIDE
.  N     - global vector length (or PETSC_DECIDE to have it calculated)
-  array - the user-provided array to store the vector values

   Output Parameter:
.  vv - the vector

   Notes:
   Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
   same type as an existing vector.

   If the user-provided array is NULL, then VecPlaceArray() can be used
   at a later stage to SET the array for storing the vector values.

   PETSc does NOT free the array when the vector is destroyed via VecDestroy().
   The user should not free the array until the vector is destroyed.

   Level: intermediate

   Concepts: vectors^creating with array

.seealso: VecCreateSeqWithArray(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(),
          VecCreateMPI(), VecCreateGhostWithArray(), VecPlaceArray()

@*/
PetscErrorCode  VecCreateMPIWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,const PetscScalar array[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (n == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_FALSE,0,array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
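
/*
   A minimal usage sketch (not part of this file's build): wrap caller-owned
   storage in a parallel vector. The local size n and block size 1 are
   illustrative; the array must hold at least n entries and must outlive the
   vector, since PETSc never frees it. Assumes PetscInitialize() has been
   called and ierr is in scope.

       PetscInt    n = 100;
       PetscScalar *array;
       Vec         x;
       ierr = PetscMalloc(n*sizeof(PetscScalar),&array);CHKERRQ(ierr);
       ierr = VecCreateMPIWithArray(PETSC_COMM_WORLD,1,n,PETSC_DECIDE,array,&x);CHKERRQ(ierr);
       ierr = VecSet(x,1.0);CHKERRQ(ierr);
       ierr = VecDestroy(&x);CHKERRQ(ierr);
       ierr = PetscFree(array);CHKERRQ(ierr);
*/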

#undef __FUNCT__
#define __FUNCT__ "VecCreateGhostWithArray"
/*@C
   VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost points
.  ghosts - global indices of ghost points (or NULL if not needed); these do not need to be in increasing (sorted) order
-  array - the space to store the vector values (must be of length at least n+nghost)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   Level: advanced

   Concepts: vectors^creating with array
   Concepts: vectors^ghosted

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray(), VecMPISetGhost()

@*/
PetscErrorCode  VecCreateGhostWithArray(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode         ierr;
  Vec_MPI                *w;
  PetscScalar            *larray;
  IS                     from,to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart,i,*indices;

  PetscFunctionBegin;
  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost,array);CHKERRQ(ierr);
  w    = (Vec_MPI*)(*vv)->data;
  /* Create local representation */
  ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localrep);CHKERRQ(ierr);
  ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  ierr = ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localupdate);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);

  /* set local to global mapping for ghosted vector */
  ierr = PetscMalloc((n+nghost)*sizeof(PetscInt),&indices);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(*vv,&rstart,NULL);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    indices[i] = rstart + i;
  }
  for (i=0; i<nghost; i++) {
    indices[n+i] = ghosts[i];
  }
  ierr = ISLocalToGlobalMappingCreate(comm,n+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
  ierr = VecSetLocalToGlobalMapping(*vv,ltog);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
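
/*
   A minimal usage sketch (not part of this file's build), intended for two
   processes: each owns two entries and ghosts one entry of the other, and the
   caller supplies storage of length n+nghost = 3. The index choices are
   illustrative; assumes PetscInitialize() has been called and ierr and the
   MPI rank are in scope.

       PetscScalar array[3];
       PetscInt    ghosts[1];
       Vec         x,xlocal;
       ghosts[0] = rank ? 0 : 2;
       ierr = VecCreateGhostWithArray(PETSC_COMM_WORLD,2,PETSC_DECIDE,1,ghosts,array,&x);CHKERRQ(ierr);
       ierr = VecGhostGetLocalForm(x,&xlocal);CHKERRQ(ierr);
       ierr = VecGhostRestoreLocalForm(x,&xlocal);CHKERRQ(ierr);
       ierr = VecDestroy(&x);CHKERRQ(ierr);
*/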

#undef __FUNCT__
#define __FUNCT__ "VecCreateGhost"
/*@
   VecCreateGhost - Creates a parallel vector with ghost padding on each processor.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points; these do not need to be in increasing (sorted) order

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray(), VecMPISetGhost()

@*/
PetscErrorCode  VecCreateGhost(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreateGhostWithArray(comm,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
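
/*
   A minimal usage sketch (not part of this file's build) of the usual ghost
   update cycle: fill the global vector, then scatter owned values into the
   ghost slots before reading them through the local form. The sizes and
   indices mirror the VecCreateGhostWithArray() sketch above; assumes
   PetscInitialize() has been called and ierr and the MPI rank are in scope.

       PetscInt ghosts[1];
       Vec      x;
       ghosts[0] = rank ? 0 : 2;
       ierr = VecCreateGhost(PETSC_COMM_WORLD,2,PETSC_DECIDE,1,ghosts,&x);CHKERRQ(ierr);
       ierr = VecSet(x,1.0);CHKERRQ(ierr);
       ierr = VecGhostUpdateBegin(x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
       ierr = VecGhostUpdateEnd(x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
       ierr = VecDestroy(&x);CHKERRQ(ierr);
*/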

#undef __FUNCT__
#define __FUNCT__ "VecMPISetGhost"
/*@
   VecMPISetGhost - Sets the ghost points for an MPI ghost vector

   Collective on Vec

   Input Parameters:
+  vv - the MPI vector
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points; these do not need to be in increasing (sorted) order

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   You must call this AFTER you have set the type of the vector (with VecSetType()) and the size (with VecSetSizes()).

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode  VecMPISetGhost(Vec vv,PetscInt nghost,const PetscInt ghosts[])
{
  PetscErrorCode ierr;
  PetscBool      flg;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)vv,VECMPI,&flg);CHKERRQ(ierr);
  /* if this is already a fully built VECMPI then basically destroy it and rebuild with ghosting */
  if (flg) {
    PetscInt               n,N;
    Vec_MPI                *w;
    PetscScalar            *larray;
    IS                     from,to;
    ISLocalToGlobalMapping ltog;
    PetscInt               rstart,i,*indices;
    MPI_Comm               comm;

    ierr = PetscObjectGetComm((PetscObject)vv,&comm);CHKERRQ(ierr);
    n    = vv->map->n;
    N    = vv->map->N;
    ierr = (*vv->ops->destroy)(vv);CHKERRQ(ierr);
    ierr = VecSetSizes(vv,n,N);CHKERRQ(ierr);
    ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,nghost,NULL);CHKERRQ(ierr);
    w    = (Vec_MPI*)(vv)->data;
    /* Create local representation */
    ierr = VecGetArray(vv,&larray);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(vv,w->localrep);CHKERRQ(ierr);
    ierr = VecRestoreArray(vv,&larray);CHKERRQ(ierr);

    /*
     Create scatter context for scattering (updating) ghost values
     */
    ierr = ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
    ierr = VecScatterCreate(vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(vv,w->localupdate);CHKERRQ(ierr);
    ierr = ISDestroy(&to);CHKERRQ(ierr);
    ierr = ISDestroy(&from);CHKERRQ(ierr);

    /* set local to global mapping for ghosted vector */
    ierr = PetscMalloc((n+nghost)*sizeof(PetscInt),&indices);CHKERRQ(ierr);
    ierr = VecGetOwnershipRange(vv,&rstart,NULL);CHKERRQ(ierr);

    for (i=0; i<n; i++)      indices[i]   = rstart + i;
    for (i=0; i<nghost; i++) indices[n+i] = ghosts[i];

    ierr = ISLocalToGlobalMappingCreate(comm,n+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
    ierr = VecSetLocalToGlobalMapping(vv,ltog);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  } else if (vv->ops->create == VecCreate_MPI) SETERRQ(PetscObjectComm((PetscObject)vv),PETSC_ERR_ARG_WRONGSTATE,"Must set local or global size before setting ghosting");
  else if (!((PetscObject)vv)->type_name) SETERRQ(PetscObjectComm((PetscObject)vv),PETSC_ERR_ARG_WRONGSTATE,"Must set type to VECMPI before ghosting");
  PetscFunctionReturn(0);
}
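
/*
   A minimal usage sketch (not part of this file's build): add ghosting to a
   vector after its type and sizes have been set, per the Note above. The
   sizes and ghost indices are illustrative; assumes PetscInitialize() has
   been called and ierr is in scope.

       Vec      x;
       PetscInt ghosts[2] = {0,1};
       ierr = VecCreate(PETSC_COMM_WORLD,&x);CHKERRQ(ierr);
       ierr = VecSetSizes(x,4,PETSC_DECIDE);CHKERRQ(ierr);
       ierr = VecSetType(x,VECMPI);CHKERRQ(ierr);
       ierr = VecMPISetGhost(x,2,ghosts);CHKERRQ(ierr);
       ierr = VecDestroy(&x);CHKERRQ(ierr);
*/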

/* ------------------------------------------------------------------------------------------*/
#undef __FUNCT__
#define __FUNCT__ "VecCreateGhostBlockWithArray"
/*@C
   VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space. Indices in the ghost region are based on blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - block size
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost blocks
.  ghosts - global indices of ghost blocks (or NULL if not needed); counts are by block, not by individual index, and these do not need to be in increasing (sorted) order
-  array - the space to store the vector values (must be of length at least n+nghost*bs)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (total local size, not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

   Level: advanced

   Concepts: vectors^creating ghosted
   Concepts: vectors^creating with array

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostWithArray(), VecCreateGhostBlock()

@*/
PetscErrorCode  VecCreateGhostBlockWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode         ierr;
  Vec_MPI                *w;
  PetscScalar            *larray;
  IS                     from,to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart,i,nb,*indices;

  PetscFunctionBegin;
  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  if (n % bs)                 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local size must be a multiple of block size");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost*bs,array);CHKERRQ(ierr);
  w    = (Vec_MPI*)(*vv)->data;
  /* Create local representation */
  ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,bs,n+bs*nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localrep);CHKERRQ(ierr);
  ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  ierr = ISCreateBlock(comm,bs,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localupdate);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);

  /* set local to global mapping for ghosted vector */
  nb   = n/bs;
  ierr = PetscMalloc((nb+nghost)*sizeof(PetscInt),&indices);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(*vv,&rstart,NULL);CHKERRQ(ierr);

  for (i=0; i<nb; i++)      indices[i]    = rstart + i*bs;
  for (i=0; i<nghost; i++)  indices[nb+i] = ghosts[i];

  ierr = ISLocalToGlobalMappingCreate(comm,nb+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
  ierr = VecSetLocalToGlobalMappingBlock(*vv,ltog);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
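
/*
   A minimal usage sketch (not part of this file's build): block size 2, two
   locally owned blocks (n = 4 entries), and one ghost block, so the caller
   supplies storage of length n+nghost*bs = 6. The ghost block index 0 is
   illustrative; assumes PetscInitialize() has been called and ierr is in
   scope.

       PetscScalar array[6];
       PetscInt    ghosts[1] = {0};
       Vec         x;
       ierr = VecCreateGhostBlockWithArray(PETSC_COMM_WORLD,2,4,PETSC_DECIDE,1,ghosts,array,&x);CHKERRQ(ierr);
       ierr = VecDestroy(&x);CHKERRQ(ierr);
*/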

#undef __FUNCT__
#define __FUNCT__ "VecCreateGhostBlock"
/*@
   VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
        The indexing of the ghost points is done with blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - the block size
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost blocks
-  ghosts - global indices of ghost blocks; counts are by block, not by individual index, and these do not need to be in increasing (sorted) order

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (total local size, not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode  VecCreateGhostBlock(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
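
/*
   A minimal usage sketch (not part of this file's build), analogous to the
   VecCreateGhost() sketch above but with block indexing: block size 2, two
   owned blocks (n = 4 entries), one ghost block, PETSc-allocated storage.
   The ghost block index is illustrative; assumes PetscInitialize() has been
   called and ierr is in scope.

       PetscInt ghosts[1] = {0};
       Vec      x;
       ierr = VecCreateGhostBlock(PETSC_COMM_WORLD,2,4,PETSC_DECIDE,1,ghosts,&x);CHKERRQ(ierr);
       ierr = VecGhostUpdateBegin(x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
       ierr = VecGhostUpdateEnd(x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
       ierr = VecDestroy(&x);CHKERRQ(ierr);
*/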
675