xref: /petsc/src/vec/vec/impls/mpi/pbvec.c (revision 66f49036a02e7cc1eea66e354739e0ca727bf019)
1 #define PETSCVEC_DLL
2 /*
3    This file contains routines for Parallel vector operations.
4  */
5 #include "src/vec/vec/impls/mpi/pvecimpl.h"   /*I  "petscvec.h"   I*/
6 
7 /*
8        Note this code is very similar to VecPublish_Seq()
9 */
#undef __FUNCT__
#define __FUNCT__ "VecPublish_MPI"
/* No-op publish hook installed on MPI vectors; mirrors VecPublish_Seq(). */
static PetscErrorCode VecPublish_MPI(PetscObject obj)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
17 
18 #undef __FUNCT__
19 #define __FUNCT__ "VecDot_MPI"
20 PetscErrorCode VecDot_MPI(Vec xin,Vec yin,PetscScalar *z)
21 {
22   PetscScalar    sum,work;
23   PetscErrorCode ierr;
24 
25   PetscFunctionBegin;
26   ierr = VecDot_Seq(xin,yin,&work);CHKERRQ(ierr);
27   ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,PetscSum_Op,xin->comm);CHKERRQ(ierr);
28   *z = sum;
29   PetscFunctionReturn(0);
30 }
31 
32 #undef __FUNCT__
33 #define __FUNCT__ "VecTDot_MPI"
34 PetscErrorCode VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z)
35 {
36   PetscScalar    sum,work;
37   PetscErrorCode ierr;
38 
39   PetscFunctionBegin;
40   ierr = VecTDot_Seq(xin,yin,&work);CHKERRQ(ierr);
41   ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,PetscSum_Op,xin->comm);CHKERRQ(ierr);
42   *z   = sum;
43   PetscFunctionReturn(0);
44 }
45 
46 #undef __FUNCT__
47 #define __FUNCT__ "VecSetOption_MPI"
48 PetscErrorCode VecSetOption_MPI(Vec v,VecOption op)
49 {
50   PetscFunctionBegin;
51   if (op == VEC_IGNORE_OFF_PROC_ENTRIES) {
52     v->stash.donotstash = PETSC_TRUE;
53   } else if (op == VEC_TREAT_OFF_PROC_ENTRIES) {
54     v->stash.donotstash = PETSC_FALSE;
55   }
56   PetscFunctionReturn(0);
57 }
58 
59 EXTERN PetscErrorCode VecDuplicate_MPI(Vec,Vec *);
60 EXTERN_C_BEGIN
61 EXTERN PetscErrorCode VecView_MPI_Draw(Vec,PetscViewer);
62 EXTERN_C_END
63 
64 #undef __FUNCT__
65 #define __FUNCT__ "VecPlaceArray_MPI"
66 PetscErrorCode VecPlaceArray_MPI(Vec vin,const PetscScalar *a)
67 {
68   PetscErrorCode ierr;
69   Vec_MPI        *v = (Vec_MPI *)vin->data;
70 
71   PetscFunctionBegin;
72   if (v->unplacedarray) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
73   v->unplacedarray = v->array;  /* save previous array so reset can bring it back */
74   v->array = (PetscScalar *)a;
75   if (v->localrep) {
76     ierr = VecPlaceArray(v->localrep,a);CHKERRQ(ierr);
77   }
78   PetscFunctionReturn(0);
79 }
80 
81 EXTERN PetscErrorCode VecLoad_Binary(PetscViewer, VecType, Vec*);
82 EXTERN PetscErrorCode VecGetValues_MPI(Vec,PetscInt,const PetscInt [],PetscScalar []);
83 
/*
   Operation table installed into every VECMPI vector by VecCreate_MPI_Private().
   Entries are positional (struct _VecOps order): operations that require
   communication use the _MPI variants, purely local operations reuse the
   sequential _Seq kernels, and 0 marks an unimplemented slot.
   NOTE(review): the VecDot_Seq/VecTDot_Seq/VecNorm_Seq/VecMDot_Seq/VecMTDot_Seq
   entries appear to fill the per-process "*_local" slots — confirm against
   the struct _VecOps declaration before reordering anything.
*/
static struct _VecOps DvOps = { VecDuplicate_MPI,
            VecDuplicateVecs_Default,
            VecDestroyVecs_Default,
            VecDot_MPI,
            VecMDot_MPI,
            VecNorm_MPI,
            VecTDot_MPI,
            VecMTDot_MPI,
            VecScale_Seq,
            VecCopy_Seq,
            VecSet_Seq,
            VecSwap_Seq,
            VecAXPY_Seq,
            VecAXPBY_Seq,
            VecMAXPY_Seq,
            VecAYPX_Seq,
            VecWAXPY_Seq,
            VecPointwiseMult_Seq,
            VecPointwiseDivide_Seq,
            VecSetValues_MPI,
            VecAssemblyBegin_MPI,
            VecAssemblyEnd_MPI,
            VecGetArray_Seq,
            VecGetSize_MPI,
            VecGetSize_Seq,   /* local size */
            VecRestoreArray_Seq,
            VecMax_MPI,
            VecMin_MPI,
            VecSetRandom_Seq,
            VecSetOption_MPI,
            VecSetValuesBlocked_MPI,
            VecDestroy_MPI,
            VecView_MPI,
            VecPlaceArray_MPI,
            VecReplaceArray_Seq,
            VecDot_Seq,       /* per-process kernels, see NOTE above */
            VecTDot_Seq,
            VecNorm_Seq,
            VecMDot_Seq,
            VecMTDot_Seq,
            VecLoadIntoVector_Default,
            VecReciprocal_Default,
            0, /* VecViewNative... */
            VecConjugate_Seq,
            0,
            0,
            VecResetArray_Seq,
            0,
            VecMaxPointwiseDivide_Seq,
            VecLoad_Binary,
            VecPointwiseMax_Seq,
            VecPointwiseMaxAbs_Seq,
            VecPointwiseMin_Seq,
            VecGetValues_MPI};
138 
#undef __FUNCT__
#define __FUNCT__ "VecCreate_MPI_Private"
/*
    VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
    VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
    VecDuplicate_MPI(), VecCreateGhostWithArray(), VecDuplicate_MPI(), and VecDuplicate_Shared()

    Input:
.   v      - vector whose sizes have already been set (VecSetSizes())
.   nghost - number of extra trailing slots for ghost values (0 for plain MPI vectors)
.   array  - user-supplied storage of length at least map.n+nghost, or 0 to
             have PETSc allocate (and later free) the storage
*/
PetscErrorCode VecCreate_MPI_Private(Vec v,PetscInt nghost,const PetscScalar array[])
{
  Vec_MPI        *s;
  PetscErrorCode ierr;

  PetscFunctionBegin;

  v->bops->publish   = VecPublish_MPI;
  ierr = PetscLogObjectMemory(v,sizeof(Vec_MPI) + (v->map.n+nghost+1)*sizeof(PetscScalar));CHKERRQ(ierr);
  ierr           = PetscNew(Vec_MPI,&s);CHKERRQ(ierr);
  /* install the VECMPI function table; may be overwritten later by subtypes */
  ierr           = PetscMemcpy(v->ops,&DvOps,sizeof(DvOps));CHKERRQ(ierr);
  v->data        = (void*)s;
  s->nghost      = nghost;
  v->mapping     = 0;
  v->bmapping    = 0;
  v->petscnative = PETSC_TRUE;

  /* default block size is 1 if the caller never set one */
  if (v->map.bs == -1) v->map.bs = 1;
  ierr = PetscMapInitialize(v->comm,&v->map);CHKERRQ(ierr);
  if (array) {
    /* user owns the storage; array_allocated = 0 means we never free it */
    s->array           = (PetscScalar *)array;
    s->array_allocated = 0;
  } else {
    PetscInt n         = v->map.n+nghost;
    ierr               = PetscMalloc(n*sizeof(PetscScalar),&s->array);CHKERRQ(ierr);
    s->array_allocated = s->array;
    /* only the owned part is zeroed; the nghost trailing slots stay uninitialized */
    ierr               = PetscMemzero(s->array,v->map.n*sizeof(PetscScalar));CHKERRQ(ierr);
  }

  /* By default parallel vectors do not have local representation */
  s->localrep    = 0;
  s->localupdate = 0;

  v->stash.insertmode  = NOT_SET_VALUES;
  /* create the stashes. The block-size for bstash is set later when
     VecSetValuesBlocked is called.
  */
  ierr = VecStashCreate_Private(v->comm,1,&v->stash);CHKERRQ(ierr);
  ierr = VecStashCreate_Private(v->comm,v->map.bs,&v->bstash);CHKERRQ(ierr);

#if defined(PETSC_HAVE_MATLAB)
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEnginePut_C","VecMatlabEnginePut_Default",VecMatlabEnginePut_Default);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEngineGet_C","VecMatlabEngineGet_Default",VecMatlabEngineGet_Default);CHKERRQ(ierr);
#endif
  ierr = PetscObjectChangeTypeName((PetscObject)v,VECMPI);CHKERRQ(ierr);
  ierr = PetscPublishAll(v);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
194 
195 /*MC
196    VECMPI - VECMPI = "mpi" - The basic parallel vector
197 
198    Options Database Keys:
199 . -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions()
200 
201   Level: beginner
202 
203 .seealso: VecCreate(), VecSetType(), VecSetFromOptions(), VecCreateMPIWithArray(), VECMPI, VecType, VecCreateMPI(), VecCreateMPI()
204 M*/
205 
206 EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "VecCreate_MPI"
/* Type constructor registered for VECMPI: a standard MPI vector with no
   ghost slots (nghost = 0) and PETSc-allocated storage (array = 0). */
PetscErrorCode PETSCVEC_DLLEXPORT VecCreate_MPI(Vec vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreate_MPI_Private(vv,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
217 EXTERN_C_END
218 
219 #undef __FUNCT__
220 #define __FUNCT__ "VecCreateMPIWithArray"
221 /*@C
222    VecCreateMPIWithArray - Creates a parallel, array-style vector,
223    where the user provides the array space to store the vector values.
224 
225    Collective on MPI_Comm
226 
227    Input Parameters:
228 +  comm  - the MPI communicator to use
229 .  n     - local vector length, cannot be PETSC_DECIDE
230 .  N     - global vector length (or PETSC_DECIDE to have calculated)
231 -  array - the user provided array to store the vector values
232 
233    Output Parameter:
234 .  vv - the vector
235 
236    Notes:
237    Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
238    same type as an existing vector.
239 
240    If the user-provided array is PETSC_NULL, then VecPlaceArray() can be used
241    at a later stage to SET the array for storing the vector values.
242 
243    PETSc does NOT free the array when the vector is destroyed via VecDestroy().
244    The user should not free the array until the vector is destroyed.
245 
246    Level: intermediate
247 
248    Concepts: vectors^creating with array
249 
250 .seealso: VecCreateSeqWithArray(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(),
251           VecCreateMPI(), VecCreateGhostWithArray(), VecPlaceArray()
252 
253 @*/
254 PetscErrorCode PETSCVEC_DLLEXPORT VecCreateMPIWithArray(MPI_Comm comm,PetscInt n,PetscInt N,const PetscScalar array[],Vec *vv)
255 {
256   PetscErrorCode ierr;
257 
258   PetscFunctionBegin;
259   if (n == PETSC_DECIDE) {
260     SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
261   }
262   ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
263   ierr = VecCreate(comm,vv);CHKERRQ(ierr);
264   ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
265   ierr = VecCreate_MPI_Private(*vv,0,array);CHKERRQ(ierr);
266   PetscFunctionReturn(0);
267 }
268 
269 #undef __FUNCT__
270 #define __FUNCT__ "VecGhostGetLocalForm"
271 /*@
272     VecGhostGetLocalForm - Obtains the local ghosted representation of
273     a parallel vector created with VecCreateGhost().
274 
275     Not Collective
276 
277     Input Parameter:
278 .   g - the global vector. Vector must have been obtained with either
279         VecCreateGhost(), VecCreateGhostWithArray() or VecCreateSeq().
280 
281     Output Parameter:
282 .   l - the local (ghosted) representation
283 
284     Notes:
285     This routine does not actually update the ghost values, but rather it
286     returns a sequential vector that includes the locations for the ghost
287     values and their current values. The returned vector and the original
288     vector passed in share the same array that contains the actual vector data.
289 
290     One should call VecGhostRestoreLocalForm() or VecDestroy() once one is
291     finished using the object.
292 
293     Level: advanced
294 
295    Concepts: vectors^ghost point access
296 
297 .seealso: VecCreateGhost(), VecGhostRestoreLocalForm(), VecCreateGhostWithArray()
298 
299 @*/
300 PetscErrorCode PETSCVEC_DLLEXPORT VecGhostGetLocalForm(Vec g,Vec *l)
301 {
302   PetscErrorCode ierr;
303   PetscTruth     isseq,ismpi;
304 
305   PetscFunctionBegin;
306   PetscValidHeaderSpecific(g,VEC_COOKIE,1);
307   PetscValidPointer(l,2);
308 
309   ierr = PetscTypeCompare((PetscObject)g,VECSEQ,&isseq);CHKERRQ(ierr);
310   ierr = PetscTypeCompare((PetscObject)g,VECMPI,&ismpi);CHKERRQ(ierr);
311   if (ismpi) {
312     Vec_MPI *v  = (Vec_MPI*)g->data;
313     if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
314     *l = v->localrep;
315   } else if (isseq) {
316     *l = g;
317   } else {
318     SETERRQ1(PETSC_ERR_ARG_WRONG,"Vector type %s does not have local representation",g->type_name);
319   }
320   ierr = PetscObjectReference((PetscObject)*l);CHKERRQ(ierr);
321   PetscFunctionReturn(0);
322 }
323 
324 #undef __FUNCT__
325 #define __FUNCT__ "VecGhostRestoreLocalForm"
326 /*@
327     VecGhostRestoreLocalForm - Restores the local ghosted representation of
328     a parallel vector obtained with VecGhostGetLocalForm().
329 
330     Not Collective
331 
332     Input Parameter:
333 +   g - the global vector
334 -   l - the local (ghosted) representation
335 
336     Notes:
337     This routine does not actually update the ghost values, but rather it
338     returns a sequential vector that includes the locations for the ghost values
339     and their current values.
340 
341     Level: advanced
342 
343 .seealso: VecCreateGhost(), VecGhostGetLocalForm(), VecCreateGhostWithArray()
344 @*/
345 PetscErrorCode PETSCVEC_DLLEXPORT VecGhostRestoreLocalForm(Vec g,Vec *l)
346 {
347   PetscFunctionBegin;
348   PetscObjectDereference((PetscObject)*l);
349   PetscFunctionReturn(0);
350 }
351 
352 #undef __FUNCT__
353 #define __FUNCT__ "VecGhostUpdateBegin"
354 /*@
355    VecGhostUpdateBegin - Begins the vector scatter to update the vector from
356    local representation to global or global representation to local.
357 
358    Collective on Vec
359 
360    Input Parameters:
361 +  g - the vector (obtained with VecCreateGhost() or VecDuplicate())
362 .  insertmode - one of ADD_VALUES or INSERT_VALUES
363 -  scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE
364 
365    Notes:
366    Use the following to update the ghost regions with correct values from the owning process
367 .vb
368        VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
369        VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
370 .ve
371 
372    Use the following to accumulate the ghost region values onto the owning processors
373 .vb
374        VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
375        VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
376 .ve
377 
378    To accumulate the ghost region values onto the owning processors and then update
379    the ghost regions correctly, call the latter followed by the former, i.e.,
380 .vb
381        VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
382        VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
383        VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
384        VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
385 .ve
386 
387    Level: advanced
388 
389 .seealso: VecCreateGhost(), VecGhostUpdateEnd(), VecGhostGetLocalForm(),
390           VecGhostRestoreLocalForm(),VecCreateGhostWithArray()
391 
392 @*/
393 PetscErrorCode PETSCVEC_DLLEXPORT VecGhostUpdateBegin(Vec g,InsertMode insertmode,ScatterMode scattermode)
394 {
395   Vec_MPI        *v;
396   PetscErrorCode ierr;
397 
398   PetscFunctionBegin;
399   PetscValidHeaderSpecific(g,VEC_COOKIE,1);
400 
401   v  = (Vec_MPI*)g->data;
402   if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
403   if (!v->localupdate) PetscFunctionReturn(0);
404 
405   if (scattermode == SCATTER_REVERSE) {
406     ierr = VecScatterBegin(v->localrep,g,insertmode,scattermode,v->localupdate);CHKERRQ(ierr);
407   } else {
408     ierr = VecScatterBegin(g,v->localrep,insertmode,scattermode,v->localupdate);CHKERRQ(ierr);
409   }
410   PetscFunctionReturn(0);
411 }
412 
413 #undef __FUNCT__
414 #define __FUNCT__ "VecGhostUpdateEnd"
415 /*@
416    VecGhostUpdateEnd - End the vector scatter to update the vector from
417    local representation to global or global representation to local.
418 
419    Collective on Vec
420 
421    Input Parameters:
422 +  g - the vector (obtained with VecCreateGhost() or VecDuplicate())
423 .  insertmode - one of ADD_VALUES or INSERT_VALUES
424 -  scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE
425 
426    Notes:
427 
428    Use the following to update the ghost regions with correct values from the owning process
429 .vb
430        VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
431        VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
432 .ve
433 
434    Use the following to accumulate the ghost region values onto the owning processors
435 .vb
436        VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
437        VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
438 .ve
439 
440    To accumulate the ghost region values onto the owning processors and then update
441    the ghost regions correctly, call the latter followed by the former, i.e.,
442 .vb
443        VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
444        VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
445        VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
446        VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
447 .ve
448 
449    Level: advanced
450 
451 .seealso: VecCreateGhost(), VecGhostUpdateBegin(), VecGhostGetLocalForm(),
452           VecGhostRestoreLocalForm(),VecCreateGhostWithArray()
453 
454 @*/
455 PetscErrorCode PETSCVEC_DLLEXPORT VecGhostUpdateEnd(Vec g,InsertMode insertmode,ScatterMode scattermode)
456 {
457   Vec_MPI        *v;
458   PetscErrorCode ierr;
459 
460   PetscFunctionBegin;
461   PetscValidHeaderSpecific(g,VEC_COOKIE,1);
462 
463   v  = (Vec_MPI*)g->data;
464   if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
465   if (!v->localupdate) PetscFunctionReturn(0);
466 
467   if (scattermode == SCATTER_REVERSE) {
468     ierr = VecScatterEnd(v->localrep,g,insertmode,scattermode,v->localupdate);CHKERRQ(ierr);
469   } else {
470     ierr = VecScatterEnd(g,v->localrep,insertmode,scattermode,v->localupdate);CHKERRQ(ierr);
471   }
472   PetscFunctionReturn(0);
473 }
474 
475 #undef __FUNCT__
476 #define __FUNCT__ "VecCreateGhostWithArray"
477 /*@C
478    VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
479    the caller allocates the array space.
480 
481    Collective on MPI_Comm
482 
483    Input Parameters:
484 +  comm - the MPI communicator to use
485 .  n - local vector length
486 .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
487 .  nghost - number of local ghost points
488 .  ghosts - global indices of ghost points (or PETSC_NULL if not needed)
489 -  array - the space to store the vector values (as long as n + nghost)
490 
491    Output Parameter:
492 .  vv - the global vector representation (without ghost points as part of vector)
493 
494    Notes:
495    Use VecGhostGetLocalForm() to access the local, ghosted representation
496    of the vector.
497 
498    This also automatically sets the ISLocalToGlobalMapping() for this vector.
499 
500    Level: advanced
501 
502    Concepts: vectors^creating with array
503    Concepts: vectors^ghosted
504 
505 .seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
506           VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
507           VecCreateGhostBlock(), VecCreateGhostBlockWithArray()
508 
509 @*/
PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostWithArray(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode         ierr;
  Vec_MPI                *w;
  PetscScalar            *larray;
  IS                     from,to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart,i,*indices;

  PetscFunctionBegin;
  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,nghost,array);CHKERRQ(ierr);  /* storage is n+nghost long */
  w    = (Vec_MPI *)(*vv)->data;
  /* Create local representation: a sequential vector sharing the same storage,
     long enough to expose the nghost trailing ghost slots */
  ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localrep);CHKERRQ(ierr);
  ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  /* from: the ghosts' global indices in *vv; to: the trailing slots [n,n+nghost) of the local form */
  ierr = ISCreateGeneral(comm,nghost,ghosts,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localupdate);CHKERRQ(ierr);
  ierr = ISDestroy(to);CHKERRQ(ierr);
  ierr = ISDestroy(from);CHKERRQ(ierr);

  /* set local to global mapping for ghosted vector: owned entries map to
     [rstart,rstart+n), ghost entries map to the user-supplied global indices */
  ierr = PetscMalloc((n+nghost)*sizeof(PetscInt),&indices);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(*vv,&rstart,PETSC_NULL);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    indices[i] = rstart + i;
  }
  for (i=0; i<nghost; i++) {
    indices[n+i] = ghosts[i];
  }
  ierr = ISLocalToGlobalMappingCreate(comm,n+nghost,indices,&ltog);CHKERRQ(ierr);
  ierr = VecSetLocalToGlobalMapping(*vv,ltog);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(ltog);CHKERRQ(ierr);
  ierr = PetscFree(indices);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
562 
563 #undef __FUNCT__
564 #define __FUNCT__ "VecCreateGhost"
565 /*@
566    VecCreateGhost - Creates a parallel vector with ghost padding on each processor.
567 
568    Collective on MPI_Comm
569 
570    Input Parameters:
571 +  comm - the MPI communicator to use
572 .  n - local vector length
573 .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
574 .  nghost - number of local ghost points
575 -  ghosts - global indices of ghost points
576 
577    Output Parameter:
578 .  vv - the global vector representation (without ghost points as part of vector)
579 
580    Notes:
581    Use VecGhostGetLocalForm() to access the local, ghosted representation
582    of the vector.
583 
584    This also automatically sets the ISLocalToGlobalMapping() for this vector.
585 
586    Level: advanced
587 
588    Concepts: vectors^ghosted
589 
590 .seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
591           VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
592           VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
593           VecCreateGhostBlock(), VecCreateGhostBlockWithArray()
594 
595 @*/
PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhost(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* same as VecCreateGhostWithArray() but with PETSc-allocated storage (array = 0) */
  ierr = VecCreateGhostWithArray(comm,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
604 
#undef __FUNCT__
#define __FUNCT__ "VecDuplicate_MPI"
/*
   Duplicates an MPI vector: creates a new vector with the same layout,
   copies the (possibly subtype-overridden) operation tables, recreates the
   ghosted local representation when one exists, and shares by reference the
   scatter, mappings, and attached objects/composed functions.
   Values are NOT copied.
*/
PetscErrorCode VecDuplicate_MPI(Vec win,Vec *v)
{
  PetscErrorCode ierr;
  Vec_MPI        *vw,*w = (Vec_MPI *)win->data;
  PetscScalar    *array;

  PetscFunctionBegin;
  ierr = VecCreate(win->comm,v);CHKERRQ(ierr);
  ierr = VecSetSizes(*v,win->map.n,win->map.N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*v,w->nghost,0);CHKERRQ(ierr);
  vw   = (Vec_MPI *)(*v)->data;
  /* copy ops from win (not the default DvOps) so any overrides survive */
  ierr = PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps));CHKERRQ(ierr);

  /* save local representation of the parallel vector (and scatter) if it exists */
  if (w->localrep) {
    ierr = VecGetArray(*v,&array);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,win->map.n+w->nghost,array,&vw->localrep);CHKERRQ(ierr);
    ierr = PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps));CHKERRQ(ierr);
    ierr = VecRestoreArray(*v,&array);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(*v,vw->localrep);CHKERRQ(ierr);
    /* the ghost-update scatter is shared, not copied; bump its refcount */
    vw->localupdate = w->localupdate;
    if (vw->localupdate) {
      ierr = PetscObjectReference((PetscObject)vw->localupdate);CHKERRQ(ierr);
    }
  }

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash = win->stash.donotstash;

  /* share attached objects and composed functions */
  ierr = PetscOListDuplicate(win->olist,&(*v)->olist);CHKERRQ(ierr);
  ierr = PetscFListDuplicate(win->qlist,&(*v)->qlist);CHKERRQ(ierr);
  if (win->mapping) {
    (*v)->mapping = win->mapping;
    ierr = PetscObjectReference((PetscObject)win->mapping);CHKERRQ(ierr);
  }
  if (win->bmapping) {
    (*v)->bmapping = win->bmapping;
    ierr = PetscObjectReference((PetscObject)win->bmapping);CHKERRQ(ierr);
  }
  (*v)->map.bs        = win->map.bs;
  (*v)->bstash.bs = win->bstash.bs;

  PetscFunctionReturn(0);
}
651 
652 /* ------------------------------------------------------------------------------------------*/
653 #undef __FUNCT__
654 #define __FUNCT__ "VecCreateGhostBlockWithArray"
655 /*@C
656    VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
657    the caller allocates the array space. Indices in the ghost region are based on blocks.
658 
659    Collective on MPI_Comm
660 
661    Input Parameters:
662 +  comm - the MPI communicator to use
663 .  bs - block size
664 .  n - local vector length
665 .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
666 .  nghost - number of local ghost blocks
667 .  ghosts - global indices of ghost blocks (or PETSC_NULL if not needed)
668 -  array - the space to store the vector values (as long as n + nghost*bs)
669 
670    Output Parameter:
671 .  vv - the global vector representation (without ghost points as part of vector)
672 
673    Notes:
674    Use VecGhostGetLocalForm() to access the local, ghosted representation
675    of the vector.
676 
677    n is the local vector size (total local size not the number of blocks) while nghost
678    is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
679    portion is bs*nghost
680 
681    Level: advanced
682 
683    Concepts: vectors^creating ghosted
684    Concepts: vectors^creating with array
685 
686 .seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
687           VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
688           VecCreateGhostWithArray(), VecCreateGhostBlocked()
689 
690 @*/
691 PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostBlockWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
692 {
693   PetscErrorCode ierr;
694   Vec_MPI        *w;
695   PetscScalar    *larray;
696   IS             from,to;
697 
698   PetscFunctionBegin;
699   *vv = 0;
700 
701   if (n == PETSC_DECIDE)      SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
702   if (nghost == PETSC_DECIDE) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
703   if (nghost < 0)             SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
704   ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
705   /* Create global representation */
706   ierr = VecCreate(comm,vv);CHKERRQ(ierr);
707   ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
708   ierr = VecCreate_MPI_Private(*vv,nghost*bs,array);CHKERRQ(ierr);
709   ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
710   w    = (Vec_MPI *)(*vv)->data;
711   /* Create local representation */
712   ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
713   ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,n+bs*nghost,larray,&w->localrep);CHKERRQ(ierr);
714   ierr = VecSetBlockSize(w->localrep,bs);CHKERRQ(ierr);
715   ierr = PetscLogObjectParent(*vv,w->localrep);CHKERRQ(ierr);
716   ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);
717 
718   /*
719        Create scatter context for scattering (updating) ghost values
720   */
721   ierr = ISCreateBlock(comm,bs,nghost,ghosts,&from);CHKERRQ(ierr);
722   ierr = ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);CHKERRQ(ierr);
723   ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
724   ierr = PetscLogObjectParent(*vv,w->localupdate);CHKERRQ(ierr);
725   ierr = ISDestroy(to);CHKERRQ(ierr);
726   ierr = ISDestroy(from);CHKERRQ(ierr);
727 
728   PetscFunctionReturn(0);
729 }
730 
731 #undef __FUNCT__
732 #define __FUNCT__ "VecCreateGhostBlock"
733 /*@
734    VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
735         The indexing of the ghost points is done with blocks.
736 
737    Collective on MPI_Comm
738 
739    Input Parameters:
740 +  comm - the MPI communicator to use
741 .  bs - the block size
742 .  n - local vector length
743 .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
744 .  nghost - number of local ghost blocks
745 -  ghosts - global indices of ghost blocks
746 
747    Output Parameter:
748 .  vv - the global vector representation (without ghost points as part of vector)
749 
750    Notes:
751    Use VecGhostGetLocalForm() to access the local, ghosted representation
752    of the vector.
753 
754    n is the local vector size (total local size not the number of blocks) while nghost
755    is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
756    portion is bs*nghost
757 
758    Level: advanced
759 
760    Concepts: vectors^ghosted
761 
762 .seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
763           VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
764           VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecCreateGhostBlockWithArray()
765 
766 @*/
PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostBlock(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* same as VecCreateGhostBlockWithArray() but with PETSc-allocated storage (array = 0) */
  ierr = VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
775 
776 /*
777     These introduce a ghosted vector where the ghosting is determined by the call to
778   VecSetLocalToGlobalMapping()
779 */
780 
#undef __FUNCT__
#define __FUNCT__ "VecSetLocalToGlobalMapping_FETI"
/*
   Converts an MPI vector into a ghosted one sized by the supplied
   local-to-global mapping: ghost count = mapping length minus owned length.

   NOTE(review): the previously allocated storage is freed and replaced by a
   longer, uninitialized array, so any existing vector values are discarded;
   callers appear expected to set values only after installing the mapping —
   confirm before reusing this routine elsewhere.
*/
PetscErrorCode VecSetLocalToGlobalMapping_FETI(Vec vv,ISLocalToGlobalMapping map)
{
  PetscErrorCode ierr;
  Vec_MPI        *v = (Vec_MPI *)vv->data;

  PetscFunctionBegin;
  /* number of ghost entries = mapping length minus locally owned length */
  v->nghost = map->n - vv->map.n;

  /* we need to make longer the array space that was allocated when the vector was created */
  ierr     = PetscFree(v->array_allocated);CHKERRQ(ierr);
  ierr     = PetscMalloc(map->n*sizeof(PetscScalar),&v->array_allocated);CHKERRQ(ierr);
  v->array = v->array_allocated;

  /* Create local representation */
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,map->n,v->array,&v->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(vv,v->localrep);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
801 
802 
#undef __FUNCT__
#define __FUNCT__ "VecSetValuesLocal_FETI"
/* Sets values using local (ghosted) indexing by forwarding directly to the
   sequential local representation created by VecSetLocalToGlobalMapping_FETI(). */
PetscErrorCode VecSetValuesLocal_FETI(Vec vv,PetscInt n,const PetscInt *ix,const PetscScalar *values,InsertMode mode)
{
  PetscErrorCode ierr;
  Vec_MPI        *v = (Vec_MPI *)vv->data;

  PetscFunctionBegin;
  ierr = VecSetValues(v->localrep,n,ix,values,mode);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
814 
815 EXTERN_C_BEGIN
816 #undef __FUNCT__
817 #define __FUNCT__ "VecCreate_FETI"
818 PetscErrorCode PETSCVEC_DLLEXPORT VecCreate_FETI(Vec vv)
819 {
820   PetscErrorCode ierr;
821 
822   PetscFunctionBegin;
823   ierr = VecSetType(vv,VECMPI);CHKERRQ(ierr);
824 
825   /* overwrite the functions to handle setting values locally */
826   vv->ops->setlocaltoglobalmapping = VecSetLocalToGlobalMapping_FETI;
827   vv->ops->setvalueslocal          = VecSetValuesLocal_FETI;
828   vv->ops->assemblybegin           = 0;
829   vv->ops->assemblyend             = 0;
830   vv->ops->setvaluesblocked        = 0;
831   vv->ops->setvaluesblocked        = 0;
832 
833   PetscFunctionReturn(0);
834 }
835 EXTERN_C_END
836 
837 
838 
839 
840 
841 
842 
843 
844 
845