xref: /petsc/src/vec/vec/impls/mpi/pbvec.c (revision 9be4fee84348b1eaee2eca38a590c7003da9a71b)
1 #define PETSCVEC_DLL
2 /*
3    This file contains routines for Parallel vector operations.
4  */
5 #include "src/vec/vec/impls/mpi/pvecimpl.h"   /*I  "petscvec.h"   I*/
6 
7 /*
8        Note this code is very similar to VecPublish_Seq()
9 */
10 #undef __FUNCT__
11 #define __FUNCT__ "VecPublish_MPI"
12 static PetscErrorCode VecPublish_MPI(PetscObject obj)
13 {
14   PetscFunctionBegin;
15   PetscFunctionReturn(0);
16 }
17 
18 #undef __FUNCT__
19 #define __FUNCT__ "VecDot_MPI"
20 PetscErrorCode VecDot_MPI(Vec xin,Vec yin,PetscScalar *z)
21 {
22   PetscScalar    sum,work;
23   PetscErrorCode ierr;
24 
25   PetscFunctionBegin;
26   ierr = VecDot_Seq(xin,yin,&work);CHKERRQ(ierr);
27   ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,PetscSum_Op,xin->comm);CHKERRQ(ierr);
28   *z = sum;
29   PetscFunctionReturn(0);
30 }
31 
32 #undef __FUNCT__
33 #define __FUNCT__ "VecTDot_MPI"
34 PetscErrorCode VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z)
35 {
36   PetscScalar    sum,work;
37   PetscErrorCode ierr;
38 
39   PetscFunctionBegin;
40   ierr = VecTDot_Seq(xin,yin,&work);CHKERRQ(ierr);
41   ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,PetscSum_Op,xin->comm);CHKERRQ(ierr);
42   *z   = sum;
43   PetscFunctionReturn(0);
44 }
45 
46 #undef __FUNCT__
47 #define __FUNCT__ "VecSetOption_MPI"
48 PetscErrorCode VecSetOption_MPI(Vec v,VecOption op)
49 {
50   PetscFunctionBegin;
51   if (op == VEC_IGNORE_OFF_PROC_ENTRIES) {
52     v->stash.donotstash = PETSC_TRUE;
53   } else if (op == VEC_TREAT_OFF_PROC_ENTRIES) {
54     v->stash.donotstash = PETSC_FALSE;
55   }
56   PetscFunctionReturn(0);
57 }
58 
59 EXTERN PetscErrorCode VecDuplicate_MPI(Vec,Vec *);
60 EXTERN_C_BEGIN
61 EXTERN PetscErrorCode VecView_MPI_Draw(Vec,PetscViewer);
62 EXTERN_C_END
63 
64 #undef __FUNCT__
65 #define __FUNCT__ "VecPlaceArray_MPI"
66 PetscErrorCode VecPlaceArray_MPI(Vec vin,const PetscScalar *a)
67 {
68   PetscErrorCode ierr;
69   Vec_MPI        *v = (Vec_MPI *)vin->data;
70 
71   PetscFunctionBegin;
72   if (v->unplacedarray) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
73   v->unplacedarray = v->array;  /* save previous array so reset can bring it back */
74   v->array = (PetscScalar *)a;
75   if (v->localrep) {
76     ierr = VecPlaceArray(v->localrep,a);CHKERRQ(ierr);
77   }
78   PetscFunctionReturn(0);
79 }
80 
81 EXTERN PetscErrorCode VecLoad_Binary(PetscViewer, VecType, Vec*);
82 EXTERN PetscErrorCode VecGetValues_MPI(Vec,PetscInt,const PetscInt [],PetscScalar []);
83 
84 static struct _VecOps DvOps = { VecDuplicate_MPI,
85             VecDuplicateVecs_Default,
86             VecDestroyVecs_Default,
87             VecDot_MPI,
88             VecMDot_MPI,
89             VecNorm_MPI,
90             VecTDot_MPI,
91             VecMTDot_MPI,
92             VecScale_Seq,
93             VecCopy_Seq,
94             VecSet_Seq,
95             VecSwap_Seq,
96             VecAXPY_Seq,
97             VecAXPBY_Seq,
98             VecMAXPY_Seq,
99             VecAYPX_Seq,
100             VecWAXPY_Seq,
101             VecPointwiseMult_Seq,
102             VecPointwiseDivide_Seq,
103             VecSetValues_MPI,
104             VecAssemblyBegin_MPI,
105             VecAssemblyEnd_MPI,
106             VecGetArray_Seq,
107             VecGetSize_MPI,
108             VecGetSize_Seq,
109             VecRestoreArray_Seq,
110             VecMax_MPI,
111             VecMin_MPI,
112             VecSetRandom_Seq,
113             VecSetOption_MPI,
114             VecSetValuesBlocked_MPI,
115             VecDestroy_MPI,
116             VecView_MPI,
117             VecPlaceArray_MPI,
118             VecReplaceArray_Seq,
119             VecDot_Seq,
120             VecTDot_Seq,
121             VecNorm_Seq,
122             VecMDot_Seq,
123             VecMTDot_Seq,
124             VecLoadIntoVector_Default,
125             VecReciprocal_Default,
126             0, /* VecViewNative... */
127             VecConjugate_Seq,
128             0,
129             0,
130             VecResetArray_Seq,
131             0,
132             VecMaxPointwiseDivide_Seq,
133             VecLoad_Binary,
134             VecPointwiseMax_Seq,
135             VecPointwiseMaxAbs_Seq,
136             VecPointwiseMin_Seq,
137             VecGetValues_MPI};
138 
139 #undef __FUNCT__
140 #define __FUNCT__ "VecCreate_MPI_Private"
141 /*
142     VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
143     VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
144     VecDuplicate_MPI(), VecCreateGhostWithArray(), VecDuplicate_MPI(), and VecDuplicate_Shared()
145 */
146 PetscErrorCode VecCreate_MPI_Private(Vec v,PetscInt nghost,const PetscScalar array[],PetscMap map)
147 {
148   Vec_MPI        *s;
149   PetscErrorCode ierr;
150 
151   PetscFunctionBegin;
152 
153   v->bops->publish   = VecPublish_MPI;
154   ierr = PetscLogObjectMemory(v,sizeof(Vec_MPI) + (v->n+nghost+1)*sizeof(PetscScalar));CHKERRQ(ierr);
155   ierr           = PetscNew(Vec_MPI,&s);CHKERRQ(ierr);
156   ierr           = PetscMemcpy(v->ops,&DvOps,sizeof(DvOps));CHKERRQ(ierr);
157   v->data        = (void*)s;
158   s->nghost      = nghost;
159   v->mapping     = 0;
160   v->bmapping    = 0;
161   v->petscnative = PETSC_TRUE;
162 
163   if (array) {
164     s->array           = (PetscScalar *)array;
165     s->array_allocated = 0;
166   } else {
167     PetscInt n         = v->n+nghost;
168     ierr               = PetscMalloc(n*sizeof(PetscScalar),&s->array);CHKERRQ(ierr);
169     s->array_allocated = s->array;
170     ierr               = PetscMemzero(s->array,v->n*sizeof(PetscScalar));CHKERRQ(ierr);
171   }
172 
173   /* By default parallel vectors do not have local representation */
174   s->localrep    = 0;
175   s->localupdate = 0;
176 
177   v->stash.insertmode  = NOT_SET_VALUES;
178 
179   if (!v->map) {
180     if (!map) {
181       ierr = PetscMapCreateMPI(v->comm,v->n,v->N,&v->map);CHKERRQ(ierr);
182     } else {
183       v->map = map;
184       ierr = PetscObjectReference((PetscObject)map);CHKERRQ(ierr);
185     }
186   }
187   /* create the stashes. The block-size for bstash is set later when
188      VecSetValuesBlocked is called.
189   */
190   ierr = VecStashCreate_Private(v->comm,1,&v->stash);CHKERRQ(ierr);
191   ierr = VecStashCreate_Private(v->comm,v->bs,&v->bstash);CHKERRQ(ierr);
192 
193 #if defined(PETSC_HAVE_MATLAB)
194   ierr = PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEnginePut_C","VecMatlabEnginePut_Default",VecMatlabEnginePut_Default);CHKERRQ(ierr);
195   ierr = PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEngineGet_C","VecMatlabEngineGet_Default",VecMatlabEngineGet_Default);CHKERRQ(ierr);
196 #endif
197   ierr = PetscObjectChangeTypeName((PetscObject)v,VECMPI);CHKERRQ(ierr);
198   ierr = PetscPublishAll(v);CHKERRQ(ierr);
199   PetscFunctionReturn(0);
200 }
201 
202 /*MC
203    VECMPI - VECMPI = "mpi" - The basic parallel vector
204 
205    Options Database Keys:
206 . -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions()
207 
208   Level: beginner
209 
210 .seealso: VecCreate(), VecSetType(), VecSetFromOptions(), VecCreateMpiWithArray(), VECMPI, VecType, VecCreateMPI(), VecCreateMpi()
211 M*/
212 
213 EXTERN_C_BEGIN
214 #undef __FUNCT__
215 #define __FUNCT__ "VecCreate_MPI"
216 PetscErrorCode PETSCVEC_DLLEXPORT VecCreate_MPI(Vec vv)
217 {
218   PetscErrorCode ierr;
219 
220   PetscFunctionBegin;
221   if (vv->bs > 0) {
222     ierr = PetscSplitOwnershipBlock(vv->comm,vv->bs,&vv->n,&vv->N);CHKERRQ(ierr);
223   } else {
224     ierr = PetscSplitOwnership(vv->comm,&vv->n,&vv->N);CHKERRQ(ierr);
225   }
226   ierr = VecCreate_MPI_Private(vv,0,0,PETSC_NULL);CHKERRQ(ierr);
227   PetscFunctionReturn(0);
228 }
229 EXTERN_C_END
230 
231 #undef __FUNCT__
232 #define __FUNCT__ "VecCreateMPIWithArray"
233 /*@C
234    VecCreateMPIWithArray - Creates a parallel, array-style vector,
235    where the user provides the array space to store the vector values.
236 
237    Collective on MPI_Comm
238 
239    Input Parameters:
240 +  comm  - the MPI communicator to use
241 .  n     - local vector length, cannot be PETSC_DECIDE
242 .  N     - global vector length (or PETSC_DECIDE to have calculated)
243 -  array - the user provided array to store the vector values
244 
245    Output Parameter:
246 .  vv - the vector
247 
248    Notes:
249    Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
250    same type as an existing vector.
251 
252    If the user-provided array is PETSC_NULL, then VecPlaceArray() can be used
253    at a later stage to SET the array for storing the vector values.
254 
255    PETSc does NOT free the array when the vector is destroyed via VecDestroy().
256    The user should not free the array until the vector is destroyed.
257 
258    Level: intermediate
259 
260    Concepts: vectors^creating with array
261 
262 .seealso: VecCreateSeqWithArray(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(),
263           VecCreateMPI(), VecCreateGhostWithArray(), VecPlaceArray()
264 
265 @*/
266 PetscErrorCode PETSCVEC_DLLEXPORT VecCreateMPIWithArray(MPI_Comm comm,PetscInt n,PetscInt N,const PetscScalar array[],Vec *vv)
267 {
268   PetscErrorCode ierr;
269 
270   PetscFunctionBegin;
271   if (n == PETSC_DECIDE) {
272     SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
273   }
274   ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
275   ierr = VecCreate(comm,vv);CHKERRQ(ierr);
276   ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
277   ierr = VecCreate_MPI_Private(*vv,0,array,PETSC_NULL);CHKERRQ(ierr);
278   PetscFunctionReturn(0);
279 }
280 
281 #undef __FUNCT__
282 #define __FUNCT__ "VecGhostGetLocalForm"
283 /*@
284     VecGhostGetLocalForm - Obtains the local ghosted representation of
285     a parallel vector created with VecCreateGhost().
286 
287     Not Collective
288 
289     Input Parameter:
290 .   g - the global vector. Vector must be have been obtained with either
291         VecCreateGhost(), VecCreateGhostWithArray() or VecCreateSeq().
292 
293     Output Parameter:
294 .   l - the local (ghosted) representation
295 
296     Notes:
297     This routine does not actually update the ghost values, but rather it
298     returns a sequential vector that includes the locations for the ghost
299     values and their current values. The returned vector and the original
300     vector passed in share the same array that contains the actual vector data.
301 
302     One should call VecGhostRestoreLocalForm() or VecDestroy() once one is
303     finished using the object.
304 
305     Level: advanced
306 
307    Concepts: vectors^ghost point access
308 
309 .seealso: VecCreateGhost(), VecGhostRestoreLocalForm(), VecCreateGhostWithArray()
310 
311 @*/
312 PetscErrorCode PETSCVEC_DLLEXPORT VecGhostGetLocalForm(Vec g,Vec *l)
313 {
314   PetscErrorCode ierr;
315   PetscTruth     isseq,ismpi;
316 
317   PetscFunctionBegin;
318   PetscValidHeaderSpecific(g,VEC_COOKIE,1);
319   PetscValidPointer(l,2);
320 
321   ierr = PetscTypeCompare((PetscObject)g,VECSEQ,&isseq);CHKERRQ(ierr);
322   ierr = PetscTypeCompare((PetscObject)g,VECMPI,&ismpi);CHKERRQ(ierr);
323   if (ismpi) {
324     Vec_MPI *v  = (Vec_MPI*)g->data;
325     if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
326     *l = v->localrep;
327   } else if (isseq) {
328     *l = g;
329   } else {
330     SETERRQ1(PETSC_ERR_ARG_WRONG,"Vector type %s does not have local representation",g->type_name);
331   }
332   ierr = PetscObjectReference((PetscObject)*l);CHKERRQ(ierr);
333   PetscFunctionReturn(0);
334 }
335 
336 #undef __FUNCT__
337 #define __FUNCT__ "VecGhostRestoreLocalForm"
338 /*@
339     VecGhostRestoreLocalForm - Restores the local ghosted representation of
340     a parallel vector obtained with VecGhostGetLocalForm().
341 
342     Not Collective
343 
344     Input Parameter:
345 +   g - the global vector
346 -   l - the local (ghosted) representation
347 
348     Notes:
349     This routine does not actually update the ghost values, but rather it
350     returns a sequential vector that includes the locations for the ghost values
351     and their current values.
352 
353     Level: advanced
354 
355 .seealso: VecCreateGhost(), VecGhostGetLocalForm(), VecCreateGhostWithArray()
356 @*/
357 PetscErrorCode PETSCVEC_DLLEXPORT VecGhostRestoreLocalForm(Vec g,Vec *l)
358 {
359   PetscFunctionBegin;
360   PetscObjectDereference((PetscObject)*l);
361   PetscFunctionReturn(0);
362 }
363 
364 #undef __FUNCT__
365 #define __FUNCT__ "VecGhostUpdateBegin"
366 /*@
367    VecGhostUpdateBegin - Begins the vector scatter to update the vector from
368    local representation to global or global representation to local.
369 
370    Collective on Vec
371 
372    Input Parameters:
373 +  g - the vector (obtained with VecCreateGhost() or VecDuplicate())
374 .  insertmode - one of ADD_VALUES or INSERT_VALUES
375 -  scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE
376 
377    Notes:
378    Use the following to update the ghost regions with correct values from the owning process
379 .vb
380        VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
381        VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
382 .ve
383 
384    Use the following to accumulate the ghost region values onto the owning processors
385 .vb
386        VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
387        VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
388 .ve
389 
390    To accumulate the ghost region values onto the owning processors and then update
391    the ghost regions correctly, call the later followed by the former, i.e.,
392 .vb
393        VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
394        VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
395        VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
396        VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
397 .ve
398 
399    Level: advanced
400 
401 .seealso: VecCreateGhost(), VecGhostUpdateEnd(), VecGhostGetLocalForm(),
402           VecGhostRestoreLocalForm(),VecCreateGhostWithArray()
403 
404 @*/
405 PetscErrorCode PETSCVEC_DLLEXPORT VecGhostUpdateBegin(Vec g,InsertMode insertmode,ScatterMode scattermode)
406 {
407   Vec_MPI        *v;
408   PetscErrorCode ierr;
409 
410   PetscFunctionBegin;
411   PetscValidHeaderSpecific(g,VEC_COOKIE,1);
412 
413   v  = (Vec_MPI*)g->data;
414   if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
415   if (!v->localupdate) PetscFunctionReturn(0);
416 
417   if (scattermode == SCATTER_REVERSE) {
418     ierr = VecScatterBegin(v->localrep,g,insertmode,scattermode,v->localupdate);CHKERRQ(ierr);
419   } else {
420     ierr = VecScatterBegin(g,v->localrep,insertmode,scattermode,v->localupdate);CHKERRQ(ierr);
421   }
422   PetscFunctionReturn(0);
423 }
424 
425 #undef __FUNCT__
426 #define __FUNCT__ "VecGhostUpdateEnd"
427 /*@
428    VecGhostUpdateEnd - End the vector scatter to update the vector from
429    local representation to global or global representation to local.
430 
431    Collective on Vec
432 
433    Input Parameters:
434 +  g - the vector (obtained with VecCreateGhost() or VecDuplicate())
435 .  insertmode - one of ADD_VALUES or INSERT_VALUES
436 -  scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE
437 
438    Notes:
439 
440    Use the following to update the ghost regions with correct values from the owning process
441 .vb
442        VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
443        VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
444 .ve
445 
446    Use the following to accumulate the ghost region values onto the owning processors
447 .vb
448        VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
449        VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
450 .ve
451 
452    To accumulate the ghost region values onto the owning processors and then update
453    the ghost regions correctly, call the later followed by the former, i.e.,
454 .vb
455        VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
456        VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
457        VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
458        VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
459 .ve
460 
461    Level: advanced
462 
463 .seealso: VecCreateGhost(), VecGhostUpdateBegin(), VecGhostGetLocalForm(),
464           VecGhostRestoreLocalForm(),VecCreateGhostWithArray()
465 
466 @*/
467 PetscErrorCode PETSCVEC_DLLEXPORT VecGhostUpdateEnd(Vec g,InsertMode insertmode,ScatterMode scattermode)
468 {
469   Vec_MPI        *v;
470   PetscErrorCode ierr;
471 
472   PetscFunctionBegin;
473   PetscValidHeaderSpecific(g,VEC_COOKIE,1);
474 
475   v  = (Vec_MPI*)g->data;
476   if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
477   if (!v->localupdate) PetscFunctionReturn(0);
478 
479   if (scattermode == SCATTER_REVERSE) {
480     ierr = VecScatterEnd(v->localrep,g,insertmode,scattermode,v->localupdate);CHKERRQ(ierr);
481   } else {
482     ierr = VecScatterEnd(g,v->localrep,insertmode,scattermode,v->localupdate);CHKERRQ(ierr);
483   }
484   PetscFunctionReturn(0);
485 }
486 
487 #undef __FUNCT__
488 #define __FUNCT__ "VecCreateGhostWithArray"
489 /*@C
490    VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
491    the caller allocates the array space.
492 
493    Collective on MPI_Comm
494 
495    Input Parameters:
496 +  comm - the MPI communicator to use
497 .  n - local vector length
498 .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
499 .  nghost - number of local ghost points
500 .  ghosts - global indices of ghost points (or PETSC_NULL if not needed)
501 -  array - the space to store the vector values (as long as n + nghost)
502 
503    Output Parameter:
504 .  vv - the global vector representation (without ghost points as part of vector)
505 
506    Notes:
507    Use VecGhostGetLocalForm() to access the local, ghosted representation
508    of the vector.
509 
510    Level: advanced
511 
512    Concepts: vectors^creating with array
513    Concepts: vectors^ghosted
514 
515 .seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
516           VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
517           VecCreateGhostBlock(), VecCreateGhostBlockWithArray()
518 
519 @*/
520 PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostWithArray(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
521 {
522   PetscErrorCode ierr;
523   Vec_MPI        *w;
524   PetscScalar    *larray;
525   IS             from,to;
526 
527   PetscFunctionBegin;
528   *vv = 0;
529 
530   if (n == PETSC_DECIDE)      SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
531   if (nghost == PETSC_DECIDE) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
532   if (nghost < 0)             SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
533   ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
534   /* Create global representation */
535   ierr = VecCreate(comm,vv);CHKERRQ(ierr);
536   ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
537   ierr = VecCreate_MPI_Private(*vv,nghost,array,PETSC_NULL);CHKERRQ(ierr);
538   w    = (Vec_MPI *)(*vv)->data;
539   /* Create local representation */
540   ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
541   ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
542   ierr = PetscLogObjectParent(*vv,w->localrep);CHKERRQ(ierr);
543   ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);
544 
545   /*
546        Create scatter context for scattering (updating) ghost values
547   */
548   ierr = ISCreateGeneral(comm,nghost,ghosts,&from);CHKERRQ(ierr);
549   ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
550   ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
551   ierr = PetscLogObjectParent(*vv,w->localupdate);CHKERRQ(ierr);
552   ierr = ISDestroy(to);CHKERRQ(ierr);
553   ierr = ISDestroy(from);CHKERRQ(ierr);
554 
555   PetscFunctionReturn(0);
556 }
557 
558 #undef __FUNCT__
559 #define __FUNCT__ "VecCreateGhost"
560 /*@
561    VecCreateGhost - Creates a parallel vector with ghost padding on each processor.
562 
563    Collective on MPI_Comm
564 
565    Input Parameters:
566 +  comm - the MPI communicator to use
567 .  n - local vector length
568 .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
569 .  nghost - number of local ghost points
570 -  ghosts - global indices of ghost points
571 
572    Output Parameter:
573 .  vv - the global vector representation (without ghost points as part of vector)
574 
575    Notes:
576    Use VecGhostGetLocalForm() to access the local, ghosted representation
577    of the vector.
578 
579    Level: advanced
580 
581    Concepts: vectors^ghosted
582 
583 .seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
584           VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
585           VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
586           VecCreateGhostBlock(), VecCreateGhostBlockWithArray()
587 
588 @*/
589 PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhost(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
590 {
591   PetscErrorCode ierr;
592 
593   PetscFunctionBegin;
594   ierr = VecCreateGhostWithArray(comm,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
595   PetscFunctionReturn(0);
596 }
597 
598 #undef __FUNCT__
599 #define __FUNCT__ "VecDuplicate_MPI"
600 PetscErrorCode VecDuplicate_MPI(Vec win,Vec *v)
601 {
602   PetscErrorCode ierr;
603   Vec_MPI        *vw,*w = (Vec_MPI *)win->data;
604   PetscScalar    *array;
605 
606   PetscFunctionBegin;
607   ierr = VecCreate(win->comm,v);CHKERRQ(ierr);
608   ierr = VecSetSizes(*v,win->n,win->N);CHKERRQ(ierr);
609   ierr = VecCreate_MPI_Private(*v,w->nghost,0,win->map);CHKERRQ(ierr);
610   vw   = (Vec_MPI *)(*v)->data;
611   ierr = PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps));CHKERRQ(ierr);
612 
613   /* save local representation of the parallel vector (and scatter) if it exists */
614   if (w->localrep) {
615     ierr = VecGetArray(*v,&array);CHKERRQ(ierr);
616     ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,win->n+w->nghost,array,&vw->localrep);CHKERRQ(ierr);
617     ierr = PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps));CHKERRQ(ierr);
618     ierr = VecRestoreArray(*v,&array);CHKERRQ(ierr);
619     ierr = PetscLogObjectParent(*v,vw->localrep);CHKERRQ(ierr);
620     vw->localupdate = w->localupdate;
621     if (vw->localupdate) {
622       ierr = PetscObjectReference((PetscObject)vw->localupdate);CHKERRQ(ierr);
623     }
624   }
625 
626   /* New vector should inherit stashing property of parent */
627   (*v)->stash.donotstash = win->stash.donotstash;
628 
629   ierr = PetscOListDuplicate(win->olist,&(*v)->olist);CHKERRQ(ierr);
630   ierr = PetscFListDuplicate(win->qlist,&(*v)->qlist);CHKERRQ(ierr);
631   if (win->mapping) {
632     (*v)->mapping = win->mapping;
633     ierr = PetscObjectReference((PetscObject)win->mapping);CHKERRQ(ierr);
634   }
635   if (win->bmapping) {
636     (*v)->bmapping = win->bmapping;
637     ierr = PetscObjectReference((PetscObject)win->bmapping);CHKERRQ(ierr);
638   }
639   (*v)->bs        = win->bs;
640   (*v)->bstash.bs = win->bstash.bs;
641 
642   PetscFunctionReturn(0);
643 }
644 
645 /* ------------------------------------------------------------------------------------------*/
646 #undef __FUNCT__
647 #define __FUNCT__ "VecCreateGhostBlockWithArray"
648 /*@C
649    VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
650    the caller allocates the array space. Indices in the ghost region are based on blocks.
651 
652    Collective on MPI_Comm
653 
654    Input Parameters:
655 +  comm - the MPI communicator to use
656 .  bs - block size
657 .  n - local vector length
658 .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
659 .  nghost - number of local ghost blocks
660 .  ghosts - global indices of ghost blocks (or PETSC_NULL if not needed)
661 -  array - the space to store the vector values (as long as n + nghost*bs)
662 
663    Output Parameter:
664 .  vv - the global vector representation (without ghost points as part of vector)
665 
666    Notes:
667    Use VecGhostGetLocalForm() to access the local, ghosted representation
668    of the vector.
669 
670    n is the local vector size (total local size not the number of blocks) while nghost
671    is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
672    portion is bs*nghost
673 
674    Level: advanced
675 
676    Concepts: vectors^creating ghosted
677    Concepts: vectors^creating with array
678 
679 .seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
680           VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
681           VecCreateGhostWithArray(), VecCreateGhostBlocked()
682 
683 @*/
684 PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostBlockWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
685 {
686   PetscErrorCode ierr;
687   Vec_MPI        *w;
688   PetscScalar    *larray;
689   IS             from,to;
690 
691   PetscFunctionBegin;
692   *vv = 0;
693 
694   if (n == PETSC_DECIDE)      SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
695   if (nghost == PETSC_DECIDE) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
696   if (nghost < 0)             SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
697   ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
698   /* Create global representation */
699   ierr = VecCreate(comm,vv);CHKERRQ(ierr);
700   ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
701   ierr = VecCreate_MPI_Private(*vv,nghost*bs,array,PETSC_NULL);CHKERRQ(ierr);
702   ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
703   w    = (Vec_MPI *)(*vv)->data;
704   /* Create local representation */
705   ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
706   ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,n+bs*nghost,larray,&w->localrep);CHKERRQ(ierr);
707   ierr = VecSetBlockSize(w->localrep,bs);CHKERRQ(ierr);
708   ierr = PetscLogObjectParent(*vv,w->localrep);CHKERRQ(ierr);
709   ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);
710 
711   /*
712        Create scatter context for scattering (updating) ghost values
713   */
714   ierr = ISCreateBlock(comm,bs,nghost,ghosts,&from);CHKERRQ(ierr);
715   ierr = ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);CHKERRQ(ierr);
716   ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
717   ierr = PetscLogObjectParent(*vv,w->localupdate);CHKERRQ(ierr);
718   ierr = ISDestroy(to);CHKERRQ(ierr);
719   ierr = ISDestroy(from);CHKERRQ(ierr);
720 
721   PetscFunctionReturn(0);
722 }
723 
724 #undef __FUNCT__
725 #define __FUNCT__ "VecCreateGhostBlock"
726 /*@
727    VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
728         The indicing of the ghost points is done with blocks.
729 
730    Collective on MPI_Comm
731 
732    Input Parameters:
733 +  comm - the MPI communicator to use
734 .  bs - the block size
735 .  n - local vector length
736 .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
737 .  nghost - number of local ghost blocks
738 -  ghosts - global indices of ghost blocks
739 
740    Output Parameter:
741 .  vv - the global vector representation (without ghost points as part of vector)
742 
743    Notes:
744    Use VecGhostGetLocalForm() to access the local, ghosted representation
745    of the vector.
746 
747    n is the local vector size (total local size not the number of blocks) while nghost
748    is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
749    portion is bs*nghost
750 
751    Level: advanced
752 
753    Concepts: vectors^ghosted
754 
755 .seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
756           VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
757           VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecCreateGhostBlockWithArray()
758 
759 @*/
760 PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostBlock(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
761 {
762   PetscErrorCode ierr;
763 
764   PetscFunctionBegin;
765   ierr = VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
766   PetscFunctionReturn(0);
767 }
768 
769 /*
770     These introduce a ghosted vector where the ghosting is determined by the call to
771   VecSetLocalToGlobalMapping()
772 */
773 
774 #undef __FUNCT__
775 #define __FUNCT__ "VecSetLocalToGlobalMapping_FETI"
776 PetscErrorCode VecSetLocalToGlobalMapping_FETI(Vec vv,ISLocalToGlobalMapping map)
777 {
778   PetscErrorCode ierr;
779   Vec_MPI        *v = (Vec_MPI *)vv->data;
780 
781   PetscFunctionBegin;
782   v->nghost = map->n - vv->n;
783 
784   /* we need to make longer the array space that was allocated when the vector was created */
785   ierr     = PetscFree(v->array_allocated);CHKERRQ(ierr);
786   ierr     = PetscMalloc(map->n*sizeof(PetscScalar),&v->array_allocated);CHKERRQ(ierr);
787   v->array = v->array_allocated;
788 
789   /* Create local representation */
790   ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,map->n,v->array,&v->localrep);CHKERRQ(ierr);
791   ierr = PetscLogObjectParent(vv,v->localrep);CHKERRQ(ierr);
792   PetscFunctionReturn(0);
793 }
794 
795 
796 #undef __FUNCT__
797 #define __FUNCT__ "VecSetValuesLocal_FETI"
798 PetscErrorCode VecSetValuesLocal_FETI(Vec vv,PetscInt n,const PetscInt *ix,const PetscScalar *values,InsertMode mode)
799 {
800   PetscErrorCode ierr;
801   Vec_MPI        *v = (Vec_MPI *)vv->data;
802 
803   PetscFunctionBegin;
804   ierr = VecSetValues(v->localrep,n,ix,values,mode);CHKERRQ(ierr);
805   PetscFunctionReturn(0);
806 }
807 
808 EXTERN_C_BEGIN
809 #undef __FUNCT__
810 #define __FUNCT__ "VecCreate_FETI"
811 PetscErrorCode PETSCVEC_DLLEXPORT VecCreate_FETI(Vec vv)
812 {
813   PetscErrorCode ierr;
814 
815   PetscFunctionBegin;
816   ierr = VecSetType(vv,VECMPI);CHKERRQ(ierr);
817 
818   /* overwrite the functions to handle setting values locally */
819   vv->ops->setlocaltoglobalmapping = VecSetLocalToGlobalMapping_FETI;
820   vv->ops->setvalueslocal          = VecSetValuesLocal_FETI;
821   vv->ops->assemblybegin           = 0;
822   vv->ops->assemblyend             = 0;
823   vv->ops->setvaluesblocked        = 0;
824   vv->ops->setvaluesblocked        = 0;
825 
826   PetscFunctionReturn(0);
827 }
828 EXTERN_C_END
829 
830 
831 
832 
833 
834 
835 
836 
837 
838