xref: /petsc/src/vec/vec/impls/mpi/pbvec.c (revision 7bb14e67d4f5181ffd64ba661152957fefd46218)
1 #define PETSCVEC_DLL
2 /*
3    This file contains routines for Parallel vector operations.
4  */
5 #include "../src/vec/vec/impls/mpi/pvecimpl.h"   /*I  "petscvec.h"   I*/
6 
#if 0  /* compiled out: this publishing stub is a no-op and is not referenced below */
#undef __FUNCT__
#define __FUNCT__ "VecPublish_MPI"
/* No-op stub; kept (disabled) for reference only. */
static PetscErrorCode VecPublish_MPI(PetscObject obj)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
#endif
16 
17 #undef __FUNCT__
18 #define __FUNCT__ "VecDot_MPI"
19 PetscErrorCode VecDot_MPI(Vec xin,Vec yin,PetscScalar *z)
20 {
21   PetscScalar    sum,work;
22   PetscErrorCode ierr;
23 
24   PetscFunctionBegin;
25   ierr = VecDot_Seq(xin,yin,&work);CHKERRQ(ierr);
26   ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,((PetscObject)xin)->comm);CHKERRQ(ierr);
27   *z = sum;
28   PetscFunctionReturn(0);
29 }
30 
31 #undef __FUNCT__
32 #define __FUNCT__ "VecTDot_MPI"
33 PetscErrorCode VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z)
34 {
35   PetscScalar    sum,work;
36   PetscErrorCode ierr;
37 
38   PetscFunctionBegin;
39   ierr = VecTDot_Seq(xin,yin,&work);CHKERRQ(ierr);
40   ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,((PetscObject)xin)->comm);CHKERRQ(ierr);
41   *z   = sum;
42   PetscFunctionReturn(0);
43 }
44 
45 #undef __FUNCT__
46 #define __FUNCT__ "VecSetOption_MPI"
47 PetscErrorCode VecSetOption_MPI(Vec v,VecOption op,PetscBool  flag)
48 {
49   PetscFunctionBegin;
50   if (op == VEC_IGNORE_OFF_PROC_ENTRIES) {
51     v->stash.donotstash = flag;
52   } else if (op == VEC_IGNORE_NEGATIVE_INDICES) {
53     v->stash.ignorenegidx = flag;
54   }
55   PetscFunctionReturn(0);
56 }
57 
58 EXTERN PetscErrorCode VecDuplicate_MPI(Vec,Vec *);
59 EXTERN_C_BEGIN
60 EXTERN PetscErrorCode VecView_MPI_Draw(Vec,PetscViewer);
61 EXTERN_C_END
62 
63 #undef __FUNCT__
64 #define __FUNCT__ "VecPlaceArray_MPI"
65 PetscErrorCode VecPlaceArray_MPI(Vec vin,const PetscScalar *a)
66 {
67   PetscErrorCode ierr;
68   Vec_MPI        *v = (Vec_MPI *)vin->data;
69 
70   PetscFunctionBegin;
71   if (v->unplacedarray) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
72   v->unplacedarray = v->array;  /* save previous array so reset can bring it back */
73   v->array = (PetscScalar *)a;
74   if (v->localrep) {
75     ierr = VecPlaceArray(v->localrep,a);CHKERRQ(ierr);
76   }
77   PetscFunctionReturn(0);
78 }
79 
80 #undef __FUNCT__
81 #define __FUNCT__ "VecResetArray_MPI"
82 PetscErrorCode VecResetArray_MPI(Vec vin)
83 {
84   Vec_MPI        *v = (Vec_MPI *)vin->data;
85   PetscErrorCode ierr;
86 
87   PetscFunctionBegin;
88   v->array         = v->unplacedarray;
89   v->unplacedarray = 0;
90   if (v->localrep) {
91     ierr = VecResetArray(v->localrep);CHKERRQ(ierr);
92   }
93   PetscFunctionReturn(0);
94 }
95 
96 EXTERN PetscErrorCode VecGetValues_MPI(Vec,PetscInt,const PetscInt [],PetscScalar []);
97 
/*
   Operation table installed on VECMPI vectors by VecCreate_MPI_Private().

   Operations that require communication use the _MPI implementations;
   purely local operations reuse the _Seq kernels on the owned part of
   the vector. Entries that are 0 are operations this type does not
   provide. NOTE(review): this is a positional initializer -- the order
   must match struct _VecOps exactly; the numeric comments are 1-based
   slot indices.
*/
static struct _VecOps DvOps = { VecDuplicate_MPI, /* 1 */
            VecDuplicateVecs_Default,
            VecDestroyVecs_Default,
            VecDot_MPI,
            VecMDot_MPI,
            VecNorm_MPI,
            VecTDot_MPI,
            VecMTDot_MPI,
            VecScale_Seq,
            VecCopy_Seq, /* 10 */
            VecSet_Seq,
            VecSwap_Seq,
            VecAXPY_Seq,
            VecAXPBY_Seq,
            VecMAXPY_Seq,
            VecAYPX_Seq,
            VecWAXPY_Seq,
            VecAXPBYPCZ_Seq,
            VecPointwiseMult_Seq,
            VecPointwiseDivide_Seq,
            VecSetValues_MPI, /* 20 */
            VecAssemblyBegin_MPI,
            VecAssemblyEnd_MPI,
            VecGetArray_Seq,
            VecGetSize_MPI,
            VecGetSize_Seq,   /* getlocalsize slot uses the sequential size query */
            VecRestoreArray_Seq,
            VecMax_MPI,
            VecMin_MPI,
            VecSetRandom_Seq,
            VecSetOption_MPI,
            VecSetValuesBlocked_MPI,
            VecDestroy_MPI,
            VecView_MPI,
            VecPlaceArray_MPI,
            VecReplaceArray_Seq,
            VecDot_Seq,       /* "local" dot/norm variants operate on owned entries only */
            VecTDot_Seq,
            VecNorm_Seq,
            VecMDot_Seq,
            VecMTDot_Seq,
            VecLoad_Default,
            VecReciprocal_Default,
            VecConjugate_Seq,
            0,
            0,
            VecResetArray_MPI,
            0,
            VecMaxPointwiseDivide_Seq,
            VecPointwiseMax_Seq,
            VecPointwiseMaxAbs_Seq,
            VecPointwiseMin_Seq,
            VecGetValues_MPI,
            0,
            0,
            0,
            0,
            0,
            0,
            VecStrideGather_Default,
            VecStrideScatter_Default
};
160 
161 #undef __FUNCT__
162 #define __FUNCT__ "VecCreate_MPI_Private"
163 /*
164     VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
165     VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
166     VecDuplicate_MPI(), VecCreateGhostWithArray(), VecDuplicate_MPI(), and VecDuplicate_Shared()
167 
168     If alloc is true and array is PETSC_NULL then this routine allocates the space, otherwise
169     no space is allocated.
170 */
171 PetscErrorCode VecCreate_MPI_Private(Vec v,PetscBool  alloc,PetscInt nghost,const PetscScalar array[])
172 {
173   Vec_MPI        *s;
174   PetscErrorCode ierr;
175 
176   PetscFunctionBegin;
177 
178   ierr           = PetscNewLog(v,Vec_MPI,&s);CHKERRQ(ierr);
179   v->data        = (void*)s;
180   ierr           = PetscMemcpy(v->ops,&DvOps,sizeof(DvOps));CHKERRQ(ierr);
181   s->nghost      = nghost;
182   v->mapping     = 0;
183   v->bmapping    = 0;
184   v->petscnative = PETSC_TRUE;
185 
186   if (v->map->bs == -1) v->map->bs = 1;
187   ierr = PetscLayoutSetUp(v->map);CHKERRQ(ierr);
188   s->array           = (PetscScalar *)array;
189   s->array_allocated = 0;
190   if (alloc && !array) {
191     PetscInt n         = v->map->n+nghost;
192     ierr               = PetscMalloc(n*sizeof(PetscScalar),&s->array);CHKERRQ(ierr);
193     ierr               = PetscLogObjectMemory(v,n*sizeof(PetscScalar));CHKERRQ(ierr);
194     ierr               = PetscMemzero(s->array,v->map->n*sizeof(PetscScalar));CHKERRQ(ierr);
195     s->array_allocated = s->array;
196   }
197 
198   /* By default parallel vectors do not have local representation */
199   s->localrep    = 0;
200   s->localupdate = 0;
201 
202   v->stash.insertmode  = NOT_SET_VALUES;
203   /* create the stashes. The block-size for bstash is set later when
204      VecSetValuesBlocked is called.
205   */
206   ierr = VecStashCreate_Private(((PetscObject)v)->comm,1,&v->stash);CHKERRQ(ierr);
207   ierr = VecStashCreate_Private(((PetscObject)v)->comm,v->map->bs,&v->bstash);CHKERRQ(ierr);
208 
209 #if defined(PETSC_HAVE_MATLAB_ENGINE)
210   ierr = PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEnginePut_C","VecMatlabEnginePut_Default",VecMatlabEnginePut_Default);CHKERRQ(ierr);
211   ierr = PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEngineGet_C","VecMatlabEngineGet_Default",VecMatlabEngineGet_Default);CHKERRQ(ierr);
212 #endif
213   ierr = PetscObjectChangeTypeName((PetscObject)v,VECMPI);CHKERRQ(ierr);
214   PetscFunctionReturn(0);
215 }
216 
217 /*MC
218    VECMPI - VECMPI = "mpi" - The basic parallel vector
219 
220    Options Database Keys:
221 . -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions()
222 
223   Level: beginner
224 
.seealso: VecCreate(), VecSetType(), VecSetFromOptions(), VecCreateMPIWithArray(), VECMPI, VecType, VecCreateMPI()
226 M*/
227 
228 EXTERN_C_BEGIN
229 #undef __FUNCT__
230 #define __FUNCT__ "VecCreate_MPI"
231 PetscErrorCode PETSCVEC_DLLEXPORT VecCreate_MPI(Vec vv)
232 {
233   PetscErrorCode ierr;
234 
235   PetscFunctionBegin;
236   ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,0,0);CHKERRQ(ierr);
237   PetscFunctionReturn(0);
238 }
239 EXTERN_C_END
240 
241 /*MC
242    VECSTANDARD = "standard" - A VECSEQ on one process and VECMPI on more than one process
243 
244    Options Database Keys:
245 . -vec_type standard - sets a vector type to standard on calls to VecSetFromOptions
246 
247   Level: beginner
248 
249 .seealso: VecCreateSeq(), VecCreateMPI()
250 M*/
251 
252 EXTERN_C_BEGIN
253 #undef __FUNCT__
254 #define __FUNCT__ "VecCreate_Standard"
255 PetscErrorCode PETSCVEC_DLLEXPORT VecCreate_Standard(Vec v)
256 {
257   PetscErrorCode ierr;
258   PetscMPIInt    size;
259 
260   PetscFunctionBegin;
261   ierr = MPI_Comm_size(((PetscObject)v)->comm,&size);CHKERRQ(ierr);
262   if (size == 1) {
263     ierr = VecSetType(v,VECSEQ);CHKERRQ(ierr);
264   } else {
265     ierr = VecSetType(v,VECMPI);CHKERRQ(ierr);
266   }
267   PetscFunctionReturn(0);
268 }
269 EXTERN_C_END
270 
271 
272 #undef __FUNCT__
273 #define __FUNCT__ "VecCreateMPIWithArray"
274 /*@C
275    VecCreateMPIWithArray - Creates a parallel, array-style vector,
276    where the user provides the array space to store the vector values.
277 
278    Collective on MPI_Comm
279 
280    Input Parameters:
281 +  comm  - the MPI communicator to use
282 .  n     - local vector length, cannot be PETSC_DECIDE
283 .  N     - global vector length (or PETSC_DECIDE to have calculated)
284 -  array - the user provided array to store the vector values
285 
286    Output Parameter:
287 .  vv - the vector
288 
289    Notes:
290    Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
291    same type as an existing vector.
292 
293    If the user-provided array is PETSC_NULL, then VecPlaceArray() can be used
294    at a later stage to SET the array for storing the vector values.
295 
296    PETSc does NOT free the array when the vector is destroyed via VecDestroy().
297    The user should not free the array until the vector is destroyed.
298 
299    Level: intermediate
300 
301    Concepts: vectors^creating with array
302 
303 .seealso: VecCreateSeqWithArray(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(),
304           VecCreateMPI(), VecCreateGhostWithArray(), VecPlaceArray()
305 
306 @*/
307 PetscErrorCode PETSCVEC_DLLEXPORT VecCreateMPIWithArray(MPI_Comm comm,PetscInt n,PetscInt N,const PetscScalar array[],Vec *vv)
308 {
309   PetscErrorCode ierr;
310 
311   PetscFunctionBegin;
312   if (n == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
313   ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
314   ierr = VecCreate(comm,vv);CHKERRQ(ierr);
315   ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
316   ierr = VecCreate_MPI_Private(*vv,PETSC_FALSE,0,array);CHKERRQ(ierr);
317   PetscFunctionReturn(0);
318 }
319 
320 #undef __FUNCT__
321 #define __FUNCT__ "VecGhostStateSync_Private"
322 /*
323   This is used in VecGhostGetLocalForm and VecGhostRestoreLocalForm to ensure
324   that the state is updated if either vector has changed since the last time
325   one of these functions was called.  It could apply to any PetscObject, but
326   VecGhost is quite different from other objects in that two separate vectors
327   look at the same memory.
328 
329   In principle, we could only propagate state to the local vector on
330   GetLocalForm and to the global vector on RestoreLocalForm, but this version is
331   more conservative (i.e. robust against misuse) and simpler.
332 
333   Note that this function is correct and changes nothing if both arguments are the
334   same, which is the case in serial.
335 */
336 static PetscErrorCode VecGhostStateSync_Private(Vec g,Vec l)
337 {
338   PetscErrorCode ierr;
339   PetscInt       gstate,lstate;
340 
341   PetscFunctionBegin;
342   ierr = PetscObjectStateQuery((PetscObject)g,&gstate);CHKERRQ(ierr);
343   ierr = PetscObjectStateQuery((PetscObject)l,&lstate);CHKERRQ(ierr);
344   ierr = PetscObjectSetState((PetscObject)g,PetscMax(gstate,lstate));CHKERRQ(ierr);
345   ierr = PetscObjectSetState((PetscObject)l,PetscMax(gstate,lstate));CHKERRQ(ierr);
346   PetscFunctionReturn(0);
347 }
348 
349 #undef __FUNCT__
350 #define __FUNCT__ "VecGhostGetLocalForm"
351 /*@
352     VecGhostGetLocalForm - Obtains the local ghosted representation of
353     a parallel vector created with VecCreateGhost().
354 
355     Not Collective
356 
357     Input Parameter:
358 .   g - the global vector. Vector must be have been obtained with either
359         VecCreateGhost(), VecCreateGhostWithArray() or VecCreateSeq().
360 
361     Output Parameter:
362 .   l - the local (ghosted) representation
363 
364     Notes:
365     This routine does not actually update the ghost values, but rather it
366     returns a sequential vector that includes the locations for the ghost
367     values and their current values. The returned vector and the original
368     vector passed in share the same array that contains the actual vector data.
369 
370     One should call VecGhostRestoreLocalForm() or VecDestroy() once one is
371     finished using the object.
372 
373     Level: advanced
374 
375    Concepts: vectors^ghost point access
376 
377 .seealso: VecCreateGhost(), VecGhostRestoreLocalForm(), VecCreateGhostWithArray()
378 
379 @*/
380 PetscErrorCode PETSCVEC_DLLEXPORT VecGhostGetLocalForm(Vec g,Vec *l)
381 {
382   PetscErrorCode ierr;
383   PetscBool      isseq,ismpi;
384 
385   PetscFunctionBegin;
386   PetscValidHeaderSpecific(g,VEC_CLASSID,1);
387   PetscValidPointer(l,2);
388 
389   ierr = PetscTypeCompare((PetscObject)g,VECSEQ,&isseq);CHKERRQ(ierr);
390   ierr = PetscTypeCompare((PetscObject)g,VECMPI,&ismpi);CHKERRQ(ierr);
391   if (ismpi) {
392     Vec_MPI *v  = (Vec_MPI*)g->data;
393     if (!v->localrep) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
394     *l = v->localrep;
395   } else if (isseq) {
396     *l = g;
397   } else {
398     SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Vector type %s does not have local representation",((PetscObject)g)->type_name);
399   }
400   ierr = VecGhostStateSync_Private(g,*l);CHKERRQ(ierr);
401   ierr = PetscObjectReference((PetscObject)*l);CHKERRQ(ierr);
402   PetscFunctionReturn(0);
403 }
404 
405 #undef __FUNCT__
406 #define __FUNCT__ "VecGhostRestoreLocalForm"
407 /*@
408     VecGhostRestoreLocalForm - Restores the local ghosted representation of
409     a parallel vector obtained with VecGhostGetLocalForm().
410 
411     Not Collective
412 
413     Input Parameter:
414 +   g - the global vector
415 -   l - the local (ghosted) representation
416 
417     Notes:
418     This routine does not actually update the ghost values, but rather it
419     returns a sequential vector that includes the locations for the ghost values
420     and their current values.
421 
422     Level: advanced
423 
424 .seealso: VecCreateGhost(), VecGhostGetLocalForm(), VecCreateGhostWithArray()
425 @*/
426 PetscErrorCode PETSCVEC_DLLEXPORT VecGhostRestoreLocalForm(Vec g,Vec *l)
427 {
428   PetscErrorCode ierr;
429 
430   PetscFunctionBegin;
431   ierr = VecGhostStateSync_Private(g,*l);CHKERRQ(ierr);
432   ierr = PetscObjectDereference((PetscObject)*l);CHKERRQ(ierr);
433   PetscFunctionReturn(0);
434 }
435 
436 #undef __FUNCT__
437 #define __FUNCT__ "VecGhostUpdateBegin"
438 /*@
439    VecGhostUpdateBegin - Begins the vector scatter to update the vector from
440    local representation to global or global representation to local.
441 
442    Neighbor-wise Collective on Vec
443 
444    Input Parameters:
445 +  g - the vector (obtained with VecCreateGhost() or VecDuplicate())
446 .  insertmode - one of ADD_VALUES or INSERT_VALUES
447 -  scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE
448 
449    Notes:
450    Use the following to update the ghost regions with correct values from the owning process
451 .vb
452        VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
453        VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
454 .ve
455 
456    Use the following to accumulate the ghost region values onto the owning processors
457 .vb
458        VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
459        VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
460 .ve
461 
462    To accumulate the ghost region values onto the owning processors and then update
463    the ghost regions correctly, call the later followed by the former, i.e.,
464 .vb
465        VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
466        VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
467        VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
468        VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
469 .ve
470 
471    Level: advanced
472 
473 .seealso: VecCreateGhost(), VecGhostUpdateEnd(), VecGhostGetLocalForm(),
474           VecGhostRestoreLocalForm(),VecCreateGhostWithArray()
475 
476 @*/
477 PetscErrorCode PETSCVEC_DLLEXPORT VecGhostUpdateBegin(Vec g,InsertMode insertmode,ScatterMode scattermode)
478 {
479   Vec_MPI        *v;
480   PetscErrorCode ierr;
481 
482   PetscFunctionBegin;
483   PetscValidHeaderSpecific(g,VEC_CLASSID,1);
484 
485   v  = (Vec_MPI*)g->data;
486   if (!v->localrep) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
487   if (!v->localupdate) PetscFunctionReturn(0);
488 
489   if (scattermode == SCATTER_REVERSE) {
490     ierr = VecScatterBegin(v->localupdate,v->localrep,g,insertmode,scattermode);CHKERRQ(ierr);
491   } else {
492     ierr = VecScatterBegin(v->localupdate,g,v->localrep,insertmode,scattermode);CHKERRQ(ierr);
493   }
494   PetscFunctionReturn(0);
495 }
496 
497 #undef __FUNCT__
498 #define __FUNCT__ "VecGhostUpdateEnd"
499 /*@
500    VecGhostUpdateEnd - End the vector scatter to update the vector from
501    local representation to global or global representation to local.
502 
503    Neighbor-wise Collective on Vec
504 
505    Input Parameters:
506 +  g - the vector (obtained with VecCreateGhost() or VecDuplicate())
507 .  insertmode - one of ADD_VALUES or INSERT_VALUES
508 -  scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE
509 
510    Notes:
511 
512    Use the following to update the ghost regions with correct values from the owning process
513 .vb
514        VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
515        VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
516 .ve
517 
518    Use the following to accumulate the ghost region values onto the owning processors
519 .vb
520        VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
521        VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
522 .ve
523 
524    To accumulate the ghost region values onto the owning processors and then update
525    the ghost regions correctly, call the later followed by the former, i.e.,
526 .vb
527        VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
528        VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
529        VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
530        VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
531 .ve
532 
533    Level: advanced
534 
535 .seealso: VecCreateGhost(), VecGhostUpdateBegin(), VecGhostGetLocalForm(),
536           VecGhostRestoreLocalForm(),VecCreateGhostWithArray()
537 
538 @*/
539 PetscErrorCode PETSCVEC_DLLEXPORT VecGhostUpdateEnd(Vec g,InsertMode insertmode,ScatterMode scattermode)
540 {
541   Vec_MPI        *v;
542   PetscErrorCode ierr;
543 
544   PetscFunctionBegin;
545   PetscValidHeaderSpecific(g,VEC_CLASSID,1);
546 
547   v  = (Vec_MPI*)g->data;
548   if (!v->localrep) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
549   if (!v->localupdate) PetscFunctionReturn(0);
550 
551   if (scattermode == SCATTER_REVERSE) {
552     ierr = VecScatterEnd(v->localupdate,v->localrep,g,insertmode,scattermode);CHKERRQ(ierr);
553   } else {
554     ierr = VecScatterEnd(v->localupdate,g,v->localrep,insertmode,scattermode);CHKERRQ(ierr);
555   }
556   PetscFunctionReturn(0);
557 }
558 
559 #undef __FUNCT__
560 #define __FUNCT__ "VecCreateGhostWithArray"
561 /*@C
562    VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
563    the caller allocates the array space.
564 
565    Collective on MPI_Comm
566 
567    Input Parameters:
568 +  comm - the MPI communicator to use
569 .  n - local vector length
570 .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
571 .  nghost - number of local ghost points
572 .  ghosts - global indices of ghost points (or PETSC_NULL if not needed)
573 -  array - the space to store the vector values (as long as n + nghost)
574 
575    Output Parameter:
576 .  vv - the global vector representation (without ghost points as part of vector)
577 
578    Notes:
579    Use VecGhostGetLocalForm() to access the local, ghosted representation
580    of the vector.
581 
582    This also automatically sets the ISLocalToGlobalMapping() for this vector.
583 
584    Level: advanced
585 
586    Concepts: vectors^creating with array
587    Concepts: vectors^ghosted
588 
589 .seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
590           VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
591           VecCreateGhostBlock(), VecCreateGhostBlockWithArray()
592 
593 @*/
PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostWithArray(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode         ierr;
  Vec_MPI                *w;
  PetscScalar            *larray;
  IS                     from,to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart,i,*indices;

  PetscFunctionBegin;
  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation; its storage is n+nghost long, ghosts at the tail */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost,array);CHKERRQ(ierr);
  w    = (Vec_MPI *)(*vv)->data;
  /* Create local representation: a sequential vector of length n+nghost that
     aliases the same storage (including the trailing ghost slots) */
  ierr = VecGetArrayPrivate(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localrep);CHKERRQ(ierr);
  ierr = VecRestoreArrayPrivate(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values:
       from the ghosts' global indices into local positions n..n+nghost-1
  */
  ierr = ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localupdate);CHKERRQ(ierr);
  ierr = ISDestroy(to);CHKERRQ(ierr);
  ierr = ISDestroy(from);CHKERRQ(ierr);

  /* set local to global mapping for ghosted vector: owned entries map to
     rstart..rstart+n-1, ghost entries to the caller-provided global indices */
  ierr = PetscMalloc((n+nghost)*sizeof(PetscInt),&indices);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(*vv,&rstart,PETSC_NULL);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    indices[i] = rstart + i;
  }
  for (i=0; i<nghost; i++) {
    indices[n+i] = ghosts[i];
  }
  /* PETSC_OWN_POINTER: the mapping takes ownership of indices; do not free it here */
  ierr = ISLocalToGlobalMappingCreate(comm,n+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
  ierr = VecSetLocalToGlobalMapping(*vv,ltog);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(ltog);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
645 
646 #undef __FUNCT__
647 #define __FUNCT__ "VecCreateGhost"
648 /*@
649    VecCreateGhost - Creates a parallel vector with ghost padding on each processor.
650 
651    Collective on MPI_Comm
652 
653    Input Parameters:
654 +  comm - the MPI communicator to use
655 .  n - local vector length
656 .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
657 .  nghost - number of local ghost points
658 -  ghosts - global indices of ghost points
659 
660    Output Parameter:
661 .  vv - the global vector representation (without ghost points as part of vector)
662 
663    Notes:
664    Use VecGhostGetLocalForm() to access the local, ghosted representation
665    of the vector.
666 
667    This also automatically sets the ISLocalToGlobalMapping() for this vector.
668 
669    Level: advanced
670 
671    Concepts: vectors^ghosted
672 
673 .seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
674           VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
675           VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
676           VecCreateGhostBlock(), VecCreateGhostBlockWithArray()
677 
678 @*/
679 PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhost(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
680 {
681   PetscErrorCode ierr;
682 
683   PetscFunctionBegin;
684   ierr = VecCreateGhostWithArray(comm,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
685   PetscFunctionReturn(0);
686 }
687 
688 #undef __FUNCT__
689 #define __FUNCT__ "VecDuplicate_MPI"
/*
   Creates a new parallel vector with the same layout, ops table, options,
   attached objects and (if present) ghosting as win; values are not copied.
*/
PetscErrorCode VecDuplicate_MPI(Vec win,Vec *v)
{
  PetscErrorCode ierr;
  Vec_MPI        *vw,*w = (Vec_MPI *)win->data;
  PetscScalar    *array;

  PetscFunctionBegin;
  ierr = VecCreate(((PetscObject)win)->comm,v);CHKERRQ(ierr);

  /* use the map that exists already in win (shared, reference counted) */
  ierr = PetscLayoutDestroy((*v)->map);CHKERRQ(ierr);
  (*v)->map = win->map;
  win->map->refcnt++;

  ierr = VecCreate_MPI_Private(*v,PETSC_TRUE,w->nghost,0);CHKERRQ(ierr);
  vw   = (Vec_MPI *)(*v)->data;
  /* copy win's ops table, which may differ from the default DvOps installed above */
  ierr = PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps));CHKERRQ(ierr);

  /* save local representation of the parallel vector (and scatter) if it exists */
  if (w->localrep) {
    ierr = VecGetArrayPrivate(*v,&array);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,win->map->n+w->nghost,array,&vw->localrep);CHKERRQ(ierr);
    ierr = PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps));CHKERRQ(ierr);
    ierr = VecRestoreArrayPrivate(*v,&array);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(*v,vw->localrep);CHKERRQ(ierr);
    /* the ghost-update scatter is shared with win (reference counted), not copied */
    vw->localupdate = w->localupdate;
    if (vw->localupdate) {
      ierr = PetscObjectReference((PetscObject)vw->localupdate);CHKERRQ(ierr);
    }
  }

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash = win->stash.donotstash;
  (*v)->stash.ignorenegidx = win->stash.ignorenegidx;

  /* carry over attached objects and composed functions */
  ierr = PetscOListDuplicate(((PetscObject)win)->olist,&((PetscObject)(*v))->olist);CHKERRQ(ierr);
  ierr = PetscFListDuplicate(((PetscObject)win)->qlist,&((PetscObject)(*v))->qlist);CHKERRQ(ierr);
  /* share (reference count) the local-to-global mappings, if any */
  if (win->mapping) {
    ierr = PetscObjectReference((PetscObject)win->mapping);CHKERRQ(ierr);
    (*v)->mapping = win->mapping;
  }
  if (win->bmapping) {
    ierr = PetscObjectReference((PetscObject)win->bmapping);CHKERRQ(ierr);
    (*v)->bmapping = win->bmapping;
  }
  (*v)->map->bs    = win->map->bs;
  (*v)->bstash.bs = win->bstash.bs;

  PetscFunctionReturn(0);
}
740 
741 /* ------------------------------------------------------------------------------------------*/
742 #undef __FUNCT__
743 #define __FUNCT__ "VecCreateGhostBlockWithArray"
744 /*@C
745    VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
746    the caller allocates the array space. Indices in the ghost region are based on blocks.
747 
748    Collective on MPI_Comm
749 
750    Input Parameters:
751 +  comm - the MPI communicator to use
752 .  bs - block size
753 .  n - local vector length
754 .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
755 .  nghost - number of local ghost blocks
756 .  ghosts - global indices of ghost blocks (or PETSC_NULL if not needed)
757 -  array - the space to store the vector values (as long as n + nghost*bs)
758 
759    Output Parameter:
760 .  vv - the global vector representation (without ghost points as part of vector)
761 
762    Notes:
763    Use VecGhostGetLocalForm() to access the local, ghosted representation
764    of the vector.
765 
766    n is the local vector size (total local size not the number of blocks) while nghost
767    is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
768    portion is bs*nghost
769 
770    Level: advanced
771 
772    Concepts: vectors^creating ghosted
773    Concepts: vectors^creating with array
774 
775 .seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
776           VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
777           VecCreateGhostWithArray(), VecCreateGhostBlocked()
778 
779 @*/
780 PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostBlockWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
781 {
782   PetscErrorCode ierr;
783   Vec_MPI        *w;
784   PetscScalar    *larray;
785   IS             from,to;
786   ISLocalToGlobalMapping ltog;
787   PetscInt       rstart,i,nb,*indices;
788 
789   PetscFunctionBegin;
790   *vv = 0;
791 
792   if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
793   if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
794   if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
795   if (n % bs)                 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local size must be a multiple of block size");
796   ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
797   /* Create global representation */
798   ierr = VecCreate(comm,vv);CHKERRQ(ierr);
799   ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
800   ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost*bs,array);CHKERRQ(ierr);
801   ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
802   w    = (Vec_MPI *)(*vv)->data;
803   /* Create local representation */
804   ierr = VecGetArrayPrivate(*vv,&larray);CHKERRQ(ierr);
805   ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,n+bs*nghost,larray,&w->localrep);CHKERRQ(ierr);
806   ierr = VecSetBlockSize(w->localrep,bs);CHKERRQ(ierr);
807   ierr = PetscLogObjectParent(*vv,w->localrep);CHKERRQ(ierr);
808   ierr = VecRestoreArrayPrivate(*vv,&larray);CHKERRQ(ierr);
809 
810   /*
811        Create scatter context for scattering (updating) ghost values
812   */
813   ierr = ISCreateBlock(comm,bs,nghost,ghosts,&from);CHKERRQ(ierr);
814   ierr = ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);CHKERRQ(ierr);
815   ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
816   ierr = PetscLogObjectParent(*vv,w->localupdate);CHKERRQ(ierr);
817   ierr = ISDestroy(to);CHKERRQ(ierr);
818   ierr = ISDestroy(from);CHKERRQ(ierr);
819 
820   /* set local to global mapping for ghosted vector */
821   nb = n/bs;
822   ierr = PetscMalloc((nb+nghost)*sizeof(PetscInt),&indices);CHKERRQ(ierr);
823   ierr = VecGetOwnershipRange(*vv,&rstart,PETSC_NULL);CHKERRQ(ierr);
824   for (i=0; i<nb; i++) {
825     indices[i] = rstart + i*bs;
826   }
827   for (i=0; i<nghost; i++) {
828     indices[nb+i] = ghosts[i];
829   }
830   ierr = ISLocalToGlobalMappingCreate(comm,nb+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
831   ierr = VecSetLocalToGlobalMappingBlock(*vv,ltog);CHKERRQ(ierr);
832   ierr = ISLocalToGlobalMappingDestroy(ltog);CHKERRQ(ierr);
833   PetscFunctionReturn(0);
834 }
835 
836 #undef __FUNCT__
837 #define __FUNCT__ "VecCreateGhostBlock"
838 /*@
839    VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
840         The indicing of the ghost points is done with blocks.
841 
842    Collective on MPI_Comm
843 
844    Input Parameters:
845 +  comm - the MPI communicator to use
846 .  bs - the block size
847 .  n - local vector length
848 .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
849 .  nghost - number of local ghost blocks
850 -  ghosts - global indices of ghost blocks
851 
852    Output Parameter:
853 .  vv - the global vector representation (without ghost points as part of vector)
854 
855    Notes:
856    Use VecGhostGetLocalForm() to access the local, ghosted representation
857    of the vector.
858 
859    n is the local vector size (total local size not the number of blocks) while nghost
860    is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
861    portion is bs*nghost
862 
863    Level: advanced
864 
865    Concepts: vectors^ghosted
866 
867 .seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
868           VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
869           VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecCreateGhostBlockWithArray()
870 
871 @*/
872 PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostBlock(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
873 {
874   PetscErrorCode ierr;
875 
876   PetscFunctionBegin;
877   ierr = VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
878   PetscFunctionReturn(0);
879 }
880