xref: /petsc/src/vec/vec/impls/mpi/pbvec.c (revision f39d8e23b40eadf790a9090b651a5e6aab3feb96)
1 #define PETSCVEC_DLL
2 /*
3    This file contains routines for Parallel vector operations.
4  */
5 #include "src/vec/vec/impls/mpi/pvecimpl.h"   /*I  "petscvec.h"   I*/
6 
7 /*
8        Note this code is very similar to VecPublish_Seq()
9 */
#undef __FUNCT__
#define __FUNCT__ "VecPublish_MPI"
/*
   VecPublish_MPI - Publish hook installed in v->bops->publish by
   VecCreate_MPI_Private(); intentionally a no-op in this build
   (see the note above: kept parallel to VecPublish_Seq()).
*/
static PetscErrorCode VecPublish_MPI(PetscObject obj)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
17 
18 #undef __FUNCT__
19 #define __FUNCT__ "VecDot_MPI"
20 PetscErrorCode VecDot_MPI(Vec xin,Vec yin,PetscScalar *z)
21 {
22   PetscScalar    sum,work;
23   PetscErrorCode ierr;
24 
25   PetscFunctionBegin;
26   ierr = VecDot_Seq(xin,yin,&work);CHKERRQ(ierr);
27   ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,PetscSum_Op,xin->comm);CHKERRQ(ierr);
28   *z = sum;
29   PetscFunctionReturn(0);
30 }
31 
32 #undef __FUNCT__
33 #define __FUNCT__ "VecTDot_MPI"
34 PetscErrorCode VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z)
35 {
36   PetscScalar    sum,work;
37   PetscErrorCode ierr;
38 
39   PetscFunctionBegin;
40   ierr = VecTDot_Seq(xin,yin,&work);CHKERRQ(ierr);
41   ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,PetscSum_Op,xin->comm);CHKERRQ(ierr);
42   *z   = sum;
43   PetscFunctionReturn(0);
44 }
45 
46 #undef __FUNCT__
47 #define __FUNCT__ "VecSetOption_MPI"
48 PetscErrorCode VecSetOption_MPI(Vec v,VecOption op)
49 {
50   PetscFunctionBegin;
51   if (op == VEC_IGNORE_OFF_PROC_ENTRIES) {
52     v->stash.donotstash = PETSC_TRUE;
53   } else if (op == VEC_TREAT_OFF_PROC_ENTRIES) {
54     v->stash.donotstash = PETSC_FALSE;
55   }
56   PetscFunctionReturn(0);
57 }
58 
59 EXTERN PetscErrorCode VecDuplicate_MPI(Vec,Vec *);
60 EXTERN_C_BEGIN
61 EXTERN PetscErrorCode VecView_MPI_Draw(Vec,PetscViewer);
62 EXTERN_C_END
63 
64 #undef __FUNCT__
65 #define __FUNCT__ "VecPlaceArray_MPI"
66 PetscErrorCode VecPlaceArray_MPI(Vec vin,const PetscScalar *a)
67 {
68   PetscErrorCode ierr;
69   Vec_MPI        *v = (Vec_MPI *)vin->data;
70 
71   PetscFunctionBegin;
72   if (v->unplacedarray) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
73   v->unplacedarray = v->array;  /* save previous array so reset can bring it back */
74   v->array = (PetscScalar *)a;
75   if (v->localrep) {
76     ierr = VecPlaceArray(v->localrep,a);CHKERRQ(ierr);
77   }
78   PetscFunctionReturn(0);
79 }
80 
81 EXTERN PetscErrorCode VecLoad_Binary(PetscViewer, VecType, Vec*);
82 EXTERN PetscErrorCode VecGetValues_MPI(Vec,PetscInt,const PetscInt [],PetscScalar []);
83 
/*
   Function table for the "mpi" vector type.

   Operations that require communication (dot products, norms, max/min,
   assembly, view, getsize) use the _MPI variants defined in this
   directory; purely local element-wise operations (scale, copy, axpy,
   pointwise products, ...) reuse the sequential _Seq kernels, which act
   on the locally owned portion of the array.

   NOTE(review): these are positional initializers for struct _VecOps, so
   the order must match the struct declaration in the implementation
   header exactly -- do not reorder or insert entries.
*/
static struct _VecOps DvOps = { VecDuplicate_MPI,
            VecDuplicateVecs_Default,
            VecDestroyVecs_Default,
            VecDot_MPI,
            VecMDot_MPI,
            VecNorm_MPI,
            VecTDot_MPI,
            VecMTDot_MPI,
            VecScale_Seq,
            VecCopy_Seq,
            VecSet_Seq,
            VecSwap_Seq,
            VecAXPY_Seq,
            VecAXPBY_Seq,
            VecMAXPY_Seq,
            VecAYPX_Seq,
            VecWAXPY_Seq,
            VecPointwiseMult_Seq,
            VecPointwiseDivide_Seq,
            VecSetValues_MPI,
            VecAssemblyBegin_MPI,
            VecAssemblyEnd_MPI,
            VecGetArray_Seq,
            VecGetSize_MPI,
            VecGetSize_Seq,
            VecRestoreArray_Seq,
            VecMax_MPI,
            VecMin_MPI,
            VecSetRandom_Seq,
            VecSetOption_MPI,
            VecSetValuesBlocked_MPI,
            VecDestroy_MPI,
            VecView_MPI,
            VecPlaceArray_MPI,
            VecReplaceArray_Seq,
            VecDot_Seq,
            VecTDot_Seq,
            VecNorm_Seq,
            VecLoadIntoVector_Default,
            VecReciprocal_Default,
            0, /* VecViewNative... */
            VecConjugate_Seq,
            0,
            0,
            VecResetArray_Seq,
            0,
            VecMaxPointwiseDivide_Seq,
            VecLoad_Binary,
            VecPointwiseMax_Seq,
            VecPointwiseMaxAbs_Seq,
            VecPointwiseMin_Seq,
            VecGetValues_MPI};
136 
#undef __FUNCT__
#define __FUNCT__ "VecCreate_MPI_Private"
/*
    VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
    VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
    VecDuplicate_MPI(), VecCreateGhostWithArray(), VecDuplicate_MPI(), and VecDuplicate_Shared()

    Input Parameters:
+   v      - the vector; its local size v->n must already be determined
.   nghost - number of ghost slots appended after the locally owned entries
.   array  - optional user storage of length at least v->n+nghost; if
             PETSC_NULL, storage is allocated here and freed with the vector
-   map    - optional layout to share (referenced, not copied); if
             PETSC_NULL and v has no map yet, one is created

    Installs the parallel function table, sets up value storage, the
    layout, and the stashes used by VecSetValues() for off-process
    entries, and sets the type name to VECMPI.
*/
PetscErrorCode VecCreate_MPI_Private(Vec v,PetscInt nghost,const PetscScalar array[],PetscMap map)
{
  Vec_MPI        *s;
  PetscErrorCode ierr;
  PetscMPIInt    size,rank;

  PetscFunctionBegin;
  /* NOTE(review): size and rank are not used below -- appears vestigial */
  ierr = MPI_Comm_size(v->comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(v->comm,&rank);CHKERRQ(ierr);

  v->bops->publish   = VecPublish_MPI;
  /* account for the implementation struct plus the value array in the logs */
  ierr = PetscLogObjectMemory(v,sizeof(Vec_MPI) + (v->n+nghost+1)*sizeof(PetscScalar));CHKERRQ(ierr);
  ierr           = PetscNew(Vec_MPI,&s);CHKERRQ(ierr);
  /* install the parallel function table defined above */
  ierr           = PetscMemcpy(v->ops,&DvOps,sizeof(DvOps));CHKERRQ(ierr);
  v->data        = (void*)s;
  s->nghost      = nghost;
  v->mapping     = 0;
  v->bmapping    = 0;
  v->petscnative = PETSC_TRUE;

  if (array) {
    /* adopt caller storage; array_allocated==0 signals "do not free" */
    s->array           = (PetscScalar *)array;
    s->array_allocated = 0;
  } else {
    PetscInt n         = v->n+nghost;
    ierr               = PetscMalloc(n*sizeof(PetscScalar),&s->array);CHKERRQ(ierr);
    s->array_allocated = s->array;
    /* only the owned part is zeroed; ghost slots start uninitialized */
    ierr               = PetscMemzero(s->array,v->n*sizeof(PetscScalar));CHKERRQ(ierr);
  }

  /* By default parallel vectors do not have local representation */
  s->localrep    = 0;
  s->localupdate = 0;

  v->stash.insertmode  = NOT_SET_VALUES;

  if (!v->map) {
    if (!map) {
      ierr = PetscMapCreateMPI(v->comm,v->n,v->N,&v->map);CHKERRQ(ierr);
    } else {
      /* share the caller's layout object */
      v->map = map;
      ierr = PetscObjectReference((PetscObject)map);CHKERRQ(ierr);
    }
  }
  /* create the stashes. The block-size for bstash is set later when
     VecSetValuesBlocked is called.
  */
  ierr = VecStashCreate_Private(v->comm,1,&v->stash);CHKERRQ(ierr);
  ierr = VecStashCreate_Private(v->comm,v->bs,&v->bstash);CHKERRQ(ierr);

#if defined(PETSC_HAVE_MATLAB)
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEnginePut_C","VecMatlabEnginePut_Default",VecMatlabEnginePut_Default);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEngineGet_C","VecMatlabEngineGet_Default",VecMatlabEngineGet_Default);CHKERRQ(ierr);
#endif
  ierr = PetscObjectChangeTypeName((PetscObject)v,VECMPI);CHKERRQ(ierr);
  ierr = PetscPublishAll(v);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
202 
203 /*MC
204    VECMPI - VECMPI = "mpi" - The basic parallel vector
205 
206    Options Database Keys:
207 . -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions()
208 
209   Level: beginner
210 
.seealso: VecCreate(), VecSetType(), VecSetFromOptions(), VecCreateMPIWithArray(), VECMPI, VecType, VecCreateMPI()
212 M*/
213 
214 EXTERN_C_BEGIN
215 #undef __FUNCT__
216 #define __FUNCT__ "VecCreate_MPI"
217 PetscErrorCode PETSCVEC_DLLEXPORT VecCreate_MPI(Vec vv)
218 {
219   PetscErrorCode ierr;
220 
221   PetscFunctionBegin;
222   if (vv->bs > 0) {
223     ierr = PetscSplitOwnershipBlock(vv->comm,vv->bs,&vv->n,&vv->N);CHKERRQ(ierr);
224   } else {
225     ierr = PetscSplitOwnership(vv->comm,&vv->n,&vv->N);CHKERRQ(ierr);
226   }
227   ierr = VecCreate_MPI_Private(vv,0,0,PETSC_NULL);CHKERRQ(ierr);
228   PetscFunctionReturn(0);
229 }
230 EXTERN_C_END
231 
232 #undef __FUNCT__
233 #define __FUNCT__ "VecCreateMPIWithArray"
234 /*@C
235    VecCreateMPIWithArray - Creates a parallel, array-style vector,
236    where the user provides the array space to store the vector values.
237 
238    Collective on MPI_Comm
239 
240    Input Parameters:
241 +  comm  - the MPI communicator to use
242 .  n     - local vector length, cannot be PETSC_DECIDE
243 .  N     - global vector length (or PETSC_DECIDE to have calculated)
244 -  array - the user provided array to store the vector values
245 
246    Output Parameter:
247 .  vv - the vector
248 
249    Notes:
250    Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
251    same type as an existing vector.
252 
253    If the user-provided array is PETSC_NULL, then VecPlaceArray() can be used
254    at a later stage to SET the array for storing the vector values.
255 
256    PETSc does NOT free the array when the vector is destroyed via VecDestroy().
257    The user should not free the array until the vector is destroyed.
258 
259    Level: intermediate
260 
261    Concepts: vectors^creating with array
262 
263 .seealso: VecCreateSeqWithArray(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(),
264           VecCreateMPI(), VecCreateGhostWithArray(), VecPlaceArray()
265 
266 @*/
267 PetscErrorCode PETSCVEC_DLLEXPORT VecCreateMPIWithArray(MPI_Comm comm,PetscInt n,PetscInt N,const PetscScalar array[],Vec *vv)
268 {
269   PetscErrorCode ierr;
270 
271   PetscFunctionBegin;
272   if (n == PETSC_DECIDE) {
273     SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
274   }
275   ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
276   ierr = VecCreate(comm,vv);CHKERRQ(ierr);
277   ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
278   ierr = VecCreate_MPI_Private(*vv,0,array,PETSC_NULL);CHKERRQ(ierr);
279   PetscFunctionReturn(0);
280 }
281 
282 #undef __FUNCT__
283 #define __FUNCT__ "VecGhostGetLocalForm"
284 /*@
285     VecGhostGetLocalForm - Obtains the local ghosted representation of
286     a parallel vector created with VecCreateGhost().
287 
288     Not Collective
289 
290     Input Parameter:
291 .   g - the global vector. Vector must be have been obtained with either
292         VecCreateGhost(), VecCreateGhostWithArray() or VecCreateSeq().
293 
294     Output Parameter:
295 .   l - the local (ghosted) representation
296 
297     Notes:
298     This routine does not actually update the ghost values, but rather it
299     returns a sequential vector that includes the locations for the ghost
300     values and their current values. The returned vector and the original
301     vector passed in share the same array that contains the actual vector data.
302 
303     One should call VecGhostRestoreLocalForm() or VecDestroy() once one is
304     finished using the object.
305 
306     Level: advanced
307 
308    Concepts: vectors^ghost point access
309 
310 .seealso: VecCreateGhost(), VecGhostRestoreLocalForm(), VecCreateGhostWithArray()
311 
312 @*/
313 PetscErrorCode PETSCVEC_DLLEXPORT VecGhostGetLocalForm(Vec g,Vec *l)
314 {
315   PetscErrorCode ierr;
316   PetscTruth     isseq,ismpi;
317 
318   PetscFunctionBegin;
319   PetscValidHeaderSpecific(g,VEC_COOKIE,1);
320   PetscValidPointer(l,2);
321 
322   ierr = PetscTypeCompare((PetscObject)g,VECSEQ,&isseq);CHKERRQ(ierr);
323   ierr = PetscTypeCompare((PetscObject)g,VECMPI,&ismpi);CHKERRQ(ierr);
324   if (ismpi) {
325     Vec_MPI *v  = (Vec_MPI*)g->data;
326     if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
327     *l = v->localrep;
328   } else if (isseq) {
329     *l = g;
330   } else {
331     SETERRQ1(PETSC_ERR_ARG_WRONG,"Vector type %s does not have local representation",g->type_name);
332   }
333   ierr = PetscObjectReference((PetscObject)*l);CHKERRQ(ierr);
334   PetscFunctionReturn(0);
335 }
336 
337 #undef __FUNCT__
338 #define __FUNCT__ "VecGhostRestoreLocalForm"
339 /*@
340     VecGhostRestoreLocalForm - Restores the local ghosted representation of
341     a parallel vector obtained with VecGhostGetLocalForm().
342 
343     Not Collective
344 
345     Input Parameter:
346 +   g - the global vector
347 -   l - the local (ghosted) representation
348 
349     Notes:
350     This routine does not actually update the ghost values, but rather it
351     returns a sequential vector that includes the locations for the ghost values
352     and their current values.
353 
354     Level: advanced
355 
356 .seealso: VecCreateGhost(), VecGhostGetLocalForm(), VecCreateGhostWithArray()
357 @*/
358 PetscErrorCode PETSCVEC_DLLEXPORT VecGhostRestoreLocalForm(Vec g,Vec *l)
359 {
360   PetscFunctionBegin;
361   PetscObjectDereference((PetscObject)*l);
362   PetscFunctionReturn(0);
363 }
364 
365 #undef __FUNCT__
366 #define __FUNCT__ "VecGhostUpdateBegin"
367 /*@
368    VecGhostUpdateBegin - Begins the vector scatter to update the vector from
369    local representation to global or global representation to local.
370 
371    Collective on Vec
372 
373    Input Parameters:
374 +  g - the vector (obtained with VecCreateGhost() or VecDuplicate())
375 .  insertmode - one of ADD_VALUES or INSERT_VALUES
376 -  scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE
377 
378    Notes:
379    Use the following to update the ghost regions with correct values from the owning process
380 .vb
381        VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
382        VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
383 .ve
384 
385    Use the following to accumulate the ghost region values onto the owning processors
386 .vb
387        VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
388        VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
389 .ve
390 
391    To accumulate the ghost region values onto the owning processors and then update
   the ghost regions correctly, call the latter followed by the former, i.e.,
393 .vb
394        VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
395        VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
396        VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
397        VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
398 .ve
399 
400    Level: advanced
401 
402 .seealso: VecCreateGhost(), VecGhostUpdateEnd(), VecGhostGetLocalForm(),
403           VecGhostRestoreLocalForm(),VecCreateGhostWithArray()
404 
405 @*/
406 PetscErrorCode PETSCVEC_DLLEXPORT VecGhostUpdateBegin(Vec g,InsertMode insertmode,ScatterMode scattermode)
407 {
408   Vec_MPI        *v;
409   PetscErrorCode ierr;
410 
411   PetscFunctionBegin;
412   PetscValidHeaderSpecific(g,VEC_COOKIE,1);
413 
414   v  = (Vec_MPI*)g->data;
415   if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
416   if (!v->localupdate) PetscFunctionReturn(0);
417 
418   if (scattermode == SCATTER_REVERSE) {
419     ierr = VecScatterBegin(v->localrep,g,insertmode,scattermode,v->localupdate);CHKERRQ(ierr);
420   } else {
421     ierr = VecScatterBegin(g,v->localrep,insertmode,scattermode,v->localupdate);CHKERRQ(ierr);
422   }
423   PetscFunctionReturn(0);
424 }
425 
426 #undef __FUNCT__
427 #define __FUNCT__ "VecGhostUpdateEnd"
428 /*@
429    VecGhostUpdateEnd - End the vector scatter to update the vector from
430    local representation to global or global representation to local.
431 
432    Collective on Vec
433 
434    Input Parameters:
435 +  g - the vector (obtained with VecCreateGhost() or VecDuplicate())
436 .  insertmode - one of ADD_VALUES or INSERT_VALUES
437 -  scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE
438 
439    Notes:
440 
441    Use the following to update the ghost regions with correct values from the owning process
442 .vb
443        VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
444        VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
445 .ve
446 
447    Use the following to accumulate the ghost region values onto the owning processors
448 .vb
449        VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
450        VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
451 .ve
452 
453    To accumulate the ghost region values onto the owning processors and then update
   the ghost regions correctly, call the latter followed by the former, i.e.,
455 .vb
456        VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
457        VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
458        VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
459        VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
460 .ve
461 
462    Level: advanced
463 
464 .seealso: VecCreateGhost(), VecGhostUpdateBegin(), VecGhostGetLocalForm(),
465           VecGhostRestoreLocalForm(),VecCreateGhostWithArray()
466 
467 @*/
468 PetscErrorCode PETSCVEC_DLLEXPORT VecGhostUpdateEnd(Vec g,InsertMode insertmode,ScatterMode scattermode)
469 {
470   Vec_MPI        *v;
471   PetscErrorCode ierr;
472 
473   PetscFunctionBegin;
474   PetscValidHeaderSpecific(g,VEC_COOKIE,1);
475 
476   v  = (Vec_MPI*)g->data;
477   if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
478   if (!v->localupdate) PetscFunctionReturn(0);
479 
480   if (scattermode == SCATTER_REVERSE) {
481     ierr = VecScatterEnd(v->localrep,g,insertmode,scattermode,v->localupdate);CHKERRQ(ierr);
482   } else {
483     ierr = VecScatterEnd(g,v->localrep,insertmode,scattermode,v->localupdate);CHKERRQ(ierr);
484   }
485   PetscFunctionReturn(0);
486 }
487 
#undef __FUNCT__
#define __FUNCT__ "VecCreateGhostWithArray"
/*@C
   VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
.  nghost - number of local ghost points
.  ghosts - global indices of ghost points (or PETSC_NULL if not needed)
-  array - the space to store the vector values (as long as n + nghost)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   Level: advanced

   Concepts: vectors^creating with array
   Concepts: vectors^ghosted

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostWithArray(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode ierr;
  Vec_MPI        *w;
  PetscScalar    *larray;
  IS             from,to;

  PetscFunctionBegin;
  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation; its array carries nghost extra trailing slots */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,nghost,array,PETSC_NULL);CHKERRQ(ierr);
  w    = (Vec_MPI *)(*vv)->data;
  /* Create local representation: a sequential vector of length n+nghost
     sharing the same storage as the global vector */
  ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localrep);CHKERRQ(ierr);
  ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values:
       the global entries listed in ghosts[] map onto the trailing slots
       (positions n, n+1, ...) of the local representation
  */
  ierr = ISCreateGeneral(comm,nghost,ghosts,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localupdate);CHKERRQ(ierr);
  ierr = ISDestroy(to);CHKERRQ(ierr);
  ierr = ISDestroy(from);CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
558 
559 #undef __FUNCT__
560 #define __FUNCT__ "VecCreateGhost"
561 /*@
562    VecCreateGhost - Creates a parallel vector with ghost padding on each processor.
563 
564    Collective on MPI_Comm
565 
566    Input Parameters:
567 +  comm - the MPI communicator to use
568 .  n - local vector length
569 .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
570 .  nghost - number of local ghost points
571 -  ghosts - global indices of ghost points
572 
573    Output Parameter:
574 .  vv - the global vector representation (without ghost points as part of vector)
575 
576    Notes:
577    Use VecGhostGetLocalForm() to access the local, ghosted representation
578    of the vector.
579 
580    Level: advanced
581 
582    Concepts: vectors^ghosted
583 
584 .seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
585           VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
586           VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
587           VecCreateGhostBlock(), VecCreateGhostBlockWithArray()
588 
589 @*/
590 PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhost(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
591 {
592   PetscErrorCode ierr;
593 
594   PetscFunctionBegin;
595   ierr = VecCreateGhostWithArray(comm,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
596   PetscFunctionReturn(0);
597 }
598 
#undef __FUNCT__
#define __FUNCT__ "VecDuplicate_MPI"
/*
   VecDuplicate_MPI - Creates a new parallel vector with the same layout,
   function table, ghosting, stash behavior and mappings as win.  The
   numerical values are NOT copied.
*/
PetscErrorCode VecDuplicate_MPI(Vec win,Vec *v)
{
  PetscErrorCode ierr;
  Vec_MPI        *vw,*w = (Vec_MPI *)win->data;
  PetscScalar    *array;

  PetscFunctionBegin;
  ierr = VecCreate(win->comm,v);CHKERRQ(ierr);
  ierr = VecSetSizes(*v,win->n,win->N);CHKERRQ(ierr);
  /* share win's layout object (referenced inside, not copied) */
  ierr = VecCreate_MPI_Private(*v,w->nghost,0,win->map);CHKERRQ(ierr);
  vw   = (Vec_MPI *)(*v)->data;
  /* copy the full ops table in case win's was customized after creation */
  ierr = PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps));CHKERRQ(ierr);

  /* save local representation of the parallel vector (and scatter) if it exists */
  if (w->localrep) {
    ierr = VecGetArray(*v,&array);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,win->n+w->nghost,array,&vw->localrep);CHKERRQ(ierr);
    ierr = PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps));CHKERRQ(ierr);
    ierr = VecRestoreArray(*v,&array);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(*v,vw->localrep);CHKERRQ(ierr);
    /* the ghost-update scatter is shared (reference counted), not rebuilt */
    vw->localupdate = w->localupdate;
    if (vw->localupdate) {
      ierr = PetscObjectReference((PetscObject)vw->localupdate);CHKERRQ(ierr);
    }
  }

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash = win->stash.donotstash;

  /* carry over attached objects and composed functions */
  ierr = PetscOListDuplicate(win->olist,&(*v)->olist);CHKERRQ(ierr);
  ierr = PetscFListDuplicate(win->qlist,&(*v)->qlist);CHKERRQ(ierr);
  if (win->mapping) {
    (*v)->mapping = win->mapping;
    ierr = PetscObjectReference((PetscObject)win->mapping);CHKERRQ(ierr);
  }
  if (win->bmapping) {
    (*v)->bmapping = win->bmapping;
    ierr = PetscObjectReference((PetscObject)win->bmapping);CHKERRQ(ierr);
  }
  (*v)->bs        = win->bs;
  (*v)->bstash.bs = win->bstash.bs;

  PetscFunctionReturn(0);
}
645 
646 /* ------------------------------------------------------------------------------------------*/
#undef __FUNCT__
#define __FUNCT__ "VecCreateGhostBlockWithArray"
/*@C
   VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space. Indices in the ghost region are based on blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - block size
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
.  nghost - number of local ghost blocks
.  ghosts - global indices of ghost blocks (or PETSC_NULL if not needed)
-  array - the space to store the vector values (as long as n + nghost*bs)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (total local size not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

   Level: advanced

   Concepts: vectors^creating ghosted
   Concepts: vectors^creating with array

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostWithArray(), VecCreateGhostBlocked()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostBlockWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode ierr;
  Vec_MPI        *w;
  PetscScalar    *larray;
  IS             from,to;

  PetscFunctionBegin;
  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation; nghost blocks = nghost*bs extra trailing entries */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,nghost*bs,array,PETSC_NULL);CHKERRQ(ierr);
  ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
  w    = (Vec_MPI *)(*vv)->data;
  /* Create local representation sharing the same storage as the global vector */
  ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,n+bs*nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = VecSetBlockSize(w->localrep,bs);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localrep);CHKERRQ(ierr);
  ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values;
       the source index set is block-based while the destination is the
       stride of bs*nghost entries starting at local position n
  */
  ierr = ISCreateBlock(comm,bs,nghost,ghosts,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localupdate);CHKERRQ(ierr);
  ierr = ISDestroy(to);CHKERRQ(ierr);
  ierr = ISDestroy(from);CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
724 
725 #undef __FUNCT__
726 #define __FUNCT__ "VecCreateGhostBlock"
727 /*@
728    VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
729         The indicing of the ghost points is done with blocks.
730 
731    Collective on MPI_Comm
732 
733    Input Parameters:
734 +  comm - the MPI communicator to use
735 .  bs - the block size
736 .  n - local vector length
737 .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
738 .  nghost - number of local ghost blocks
739 -  ghosts - global indices of ghost blocks
740 
741    Output Parameter:
742 .  vv - the global vector representation (without ghost points as part of vector)
743 
744    Notes:
745    Use VecGhostGetLocalForm() to access the local, ghosted representation
746    of the vector.
747 
748    n is the local vector size (total local size not the number of blocks) while nghost
749    is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
750    portion is bs*nghost
751 
752    Level: advanced
753 
754    Concepts: vectors^ghosted
755 
756 .seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
757           VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
758           VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecCreateGhostBlockWithArray()
759 
760 @*/
761 PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostBlock(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
762 {
763   PetscErrorCode ierr;
764 
765   PetscFunctionBegin;
766   ierr = VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
767   PetscFunctionReturn(0);
768 }
769 
770 /*
771     These introduce a ghosted vector where the ghosting is determined by the call to
772   VecSetLocalToGlobalMapping()
773 */
774 
#undef __FUNCT__
#define __FUNCT__ "VecSetLocalToGlobalMapping_FETI"
/*
   VecSetLocalToGlobalMapping_FETI - Turns a FETI vector into a ghosted
   one: the ghost count is deduced from the mapping size (map->n) minus
   the owned local size (vv->n) and the value array is re-allocated to
   make room for the ghost slots.

   NOTE(review): the previous array contents are discarded (no copy is
   made), PetscFree() is applied to array_allocated even when the storage
   was user-provided (array_allocated == 0), and an existing localrep is
   overwritten without being destroyed -- confirm callers invoke this
   exactly once, right after creation, before any values are set.
*/
PetscErrorCode VecSetLocalToGlobalMapping_FETI(Vec vv,ISLocalToGlobalMapping map)
{
  PetscErrorCode ierr;
  Vec_MPI        *v = (Vec_MPI *)vv->data;

  PetscFunctionBegin;
  v->nghost = map->n - vv->n;

  /* we need to make longer the array space that was allocated when the vector was created */
  ierr     = PetscFree(v->array_allocated);CHKERRQ(ierr);
  ierr     = PetscMalloc(map->n*sizeof(PetscScalar),&v->array_allocated);CHKERRQ(ierr);
  v->array = v->array_allocated;

  /* Create local representation */
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,map->n,v->array,&v->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(vv,v->localrep);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
795 
796 
797 #undef __FUNCT__
798 #define __FUNCT__ "VecSetValuesLocal_FETI"
799 PetscErrorCode VecSetValuesLocal_FETI(Vec vv,PetscInt n,const PetscInt *ix,const PetscScalar *values,InsertMode mode)
800 {
801   PetscErrorCode ierr;
802   Vec_MPI        *v = (Vec_MPI *)vv->data;
803 
804   PetscFunctionBegin;
805   ierr = VecSetValues(v->localrep,n,ix,values,mode);CHKERRQ(ierr);
806   PetscFunctionReturn(0);
807 }
808 
809 EXTERN_C_BEGIN
810 #undef __FUNCT__
811 #define __FUNCT__ "VecCreate_FETI"
812 PetscErrorCode PETSCVEC_DLLEXPORT VecCreate_FETI(Vec vv)
813 {
814   PetscErrorCode ierr;
815 
816   PetscFunctionBegin;
817   ierr = VecSetType(vv,VECMPI);CHKERRQ(ierr);
818 
819   /* overwrite the functions to handle setting values locally */
820   vv->ops->setlocaltoglobalmapping = VecSetLocalToGlobalMapping_FETI;
821   vv->ops->setvalueslocal          = VecSetValuesLocal_FETI;
822   vv->ops->assemblybegin           = 0;
823   vv->ops->assemblyend             = 0;
824   vv->ops->setvaluesblocked        = 0;
825   vv->ops->setvaluesblocked        = 0;
826 
827   PetscFunctionReturn(0);
828 }
829 EXTERN_C_END
830 
831 
832 
833 
834 
835 
836 
837 
838 
839