xref: /petsc/src/vec/vec/impls/mpi/pbvec.c (revision a057ae4adee4f3647d9255461b113f7f8283c78f)
1 #define PETSCVEC_DLL
2 /*
3    This file contains routines for Parallel vector operations.
4  */
5 #include "src/vec/vec/impls/mpi/pvecimpl.h"   /*I  "petscvec.h"   I*/
6 
7 #if 0
8 #undef __FUNCT__
9 #define __FUNCT__ "VecPublish_MPI"
10 static PetscErrorCode VecPublish_MPI(PetscObject obj)
11 {
12   PetscFunctionBegin;
13   PetscFunctionReturn(0);
14 }
15 #endif
16 
17 #undef __FUNCT__
18 #define __FUNCT__ "VecDot_MPI"
19 PetscErrorCode VecDot_MPI(Vec xin,Vec yin,PetscScalar *z)
20 {
21   PetscScalar    sum,work;
22   PetscErrorCode ierr;
23 
24   PetscFunctionBegin;
25   ierr = VecDot_Seq(xin,yin,&work);CHKERRQ(ierr);
26   ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,PetscSum_Op,((PetscObject)xin)->comm);CHKERRQ(ierr);
27   *z = sum;
28   PetscFunctionReturn(0);
29 }
30 
31 #undef __FUNCT__
32 #define __FUNCT__ "VecTDot_MPI"
33 PetscErrorCode VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z)
34 {
35   PetscScalar    sum,work;
36   PetscErrorCode ierr;
37 
38   PetscFunctionBegin;
39   ierr = VecTDot_Seq(xin,yin,&work);CHKERRQ(ierr);
40   ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,PetscSum_Op,((PetscObject)xin)->comm);CHKERRQ(ierr);
41   *z   = sum;
42   PetscFunctionReturn(0);
43 }
44 
45 #undef __FUNCT__
46 #define __FUNCT__ "VecSetOption_MPI"
47 PetscErrorCode VecSetOption_MPI(Vec v,VecOption op,PetscTruth flag)
48 {
49   PetscFunctionBegin;
50   if (op == VEC_IGNORE_OFF_PROC_ENTRIES) {
51     v->stash.donotstash = flag;
52   } else if (op == VEC_IGNORE_NEGATIVE_INDICES) {
53     v->stash.ignorenegidx = flag;
54   }
55   PetscFunctionReturn(0);
56 }
57 
58 EXTERN PetscErrorCode VecDuplicate_MPI(Vec,Vec *);
59 EXTERN_C_BEGIN
60 EXTERN PetscErrorCode VecView_MPI_Draw(Vec,PetscViewer);
61 EXTERN_C_END
62 
63 #undef __FUNCT__
64 #define __FUNCT__ "VecPlaceArray_MPI"
65 PetscErrorCode VecPlaceArray_MPI(Vec vin,const PetscScalar *a)
66 {
67   PetscErrorCode ierr;
68   Vec_MPI        *v = (Vec_MPI *)vin->data;
69 
70   PetscFunctionBegin;
71   if (v->unplacedarray) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
72   v->unplacedarray = v->array;  /* save previous array so reset can bring it back */
73   v->array = (PetscScalar *)a;
74   if (v->localrep) {
75     ierr = VecPlaceArray(v->localrep,a);CHKERRQ(ierr);
76   }
77   PetscFunctionReturn(0);
78 }
79 
80 EXTERN PetscErrorCode VecLoad_Binary(PetscViewer, VecType, Vec*);
81 EXTERN PetscErrorCode VecGetValues_MPI(Vec,PetscInt,const PetscInt [],PetscScalar []);
82 
83 static struct _VecOps DvOps = { VecDuplicate_MPI, /* 1 */
84             VecDuplicateVecs_Default,
85             VecDestroyVecs_Default,
86             VecDot_MPI,
87             VecMDot_MPI,
88             VecNorm_MPI,
89             VecTDot_MPI,
90             VecMTDot_MPI,
91             VecScale_Seq,
92             VecCopy_Seq, /* 10 */
93             VecSet_Seq,
94             VecSwap_Seq,
95             VecAXPY_Seq,
96             VecAXPBY_Seq,
97             VecMAXPY_Seq,
98             VecAYPX_Seq,
99             VecWAXPY_Seq,
100             VecPointwiseMult_Seq,
101             VecPointwiseDivide_Seq,
102             VecSetValues_MPI, /* 20 */
103             VecAssemblyBegin_MPI,
104             VecAssemblyEnd_MPI,
105             VecGetArray_Seq,
106             VecGetSize_MPI,
107             VecGetSize_Seq,
108             VecRestoreArray_Seq,
109             VecMax_MPI,
110             VecMin_MPI,
111             VecSetRandom_Seq,
112             VecSetOption_MPI,
113             VecSetValuesBlocked_MPI,
114             VecDestroy_MPI,
115             VecView_MPI,
116             VecPlaceArray_MPI,
117             VecReplaceArray_Seq,
118             VecDot_Seq,
119             VecTDot_Seq,
120             VecNorm_Seq,
121             VecMDot_Seq,
122             VecMTDot_Seq,
123             VecLoadIntoVector_Default,
124             VecReciprocal_Default,
125             0, /* VecViewNative... */
126             VecConjugate_Seq,
127             0,
128             0,
129             VecResetArray_Seq,
130             0,
131             VecMaxPointwiseDivide_Seq,
132             VecLoad_Binary,
133             VecPointwiseMax_Seq,
134             VecPointwiseMaxAbs_Seq,
135             VecPointwiseMin_Seq,
136             VecGetValues_MPI};
137 
138 #undef __FUNCT__
139 #define __FUNCT__ "VecCreate_MPI_Private"
140 /*
141     VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
142     VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
143     VecDuplicate_MPI(), VecCreateGhostWithArray(), and VecDuplicate_Shared()
144 
145     If alloc is true and array is PETSC_NULL then this routine allocates the space, otherwise
146     no space is allocated.
147 */
148 PetscErrorCode VecCreate_MPI_Private(Vec v,PetscTruth alloc,PetscInt nghost,const PetscScalar array[])
149 {
150   Vec_MPI        *s;
151   PetscErrorCode ierr;
152 
153   PetscFunctionBegin;
154 
155   ierr           = PetscNewLog(v,Vec_MPI,&s);CHKERRQ(ierr);
156   v->data        = (void*)s;
157   ierr           = PetscMemcpy(v->ops,&DvOps,sizeof(DvOps));CHKERRQ(ierr);
158   s->nghost      = nghost;
159   v->mapping     = 0;
160   v->bmapping    = 0;
161   v->petscnative = PETSC_TRUE;
162 
163   if (v->map.bs == -1) v->map.bs = 1;
164   ierr = PetscMapSetUp(&v->map);CHKERRQ(ierr);
165   s->array           = (PetscScalar *)array;
166   s->array_allocated = 0;
167   if (alloc && !array) {
168     PetscInt n         = v->map.n+nghost;
169     ierr               = PetscMalloc(n*sizeof(PetscScalar),&s->array);CHKERRQ(ierr);
170     ierr               = PetscLogObjectMemory(v,n*sizeof(PetscScalar));CHKERRQ(ierr);
171     ierr               = PetscMemzero(s->array,v->map.n*sizeof(PetscScalar));CHKERRQ(ierr);
172     s->array_allocated = s->array;
173   }
174 
175   /* By default parallel vectors do not have local representation */
176   s->localrep    = 0;
177   s->localupdate = 0;
178 
179   v->stash.insertmode  = NOT_SET_VALUES;
180   /* create the stashes. The block-size for bstash is set later when
181      VecSetValuesBlocked is called.
182   */
183   ierr = VecStashCreate_Private(((PetscObject)v)->comm,1,&v->stash);CHKERRQ(ierr);
184   ierr = VecStashCreate_Private(((PetscObject)v)->comm,v->map.bs,&v->bstash);CHKERRQ(ierr);
185 
186 #if defined(PETSC_HAVE_MATLAB_ENGINE)
187   ierr = PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEnginePut_C","VecMatlabEnginePut_Default",VecMatlabEnginePut_Default);CHKERRQ(ierr);
188   ierr = PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEngineGet_C","VecMatlabEngineGet_Default",VecMatlabEngineGet_Default);CHKERRQ(ierr);
189 #endif
190   ierr = PetscObjectChangeTypeName((PetscObject)v,VECMPI);CHKERRQ(ierr);
191   ierr = PetscPublishAll(v);CHKERRQ(ierr);
192   PetscFunctionReturn(0);
193 }
194 
195 /*MC
196    VECMPI - VECMPI = "mpi" - The basic parallel vector
197 
198    Options Database Keys:
199 . -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions()
200 
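  A minimal sketch of obtaining a VECMPI through the options database (v and N are caller-chosen;
  error checking omitted):
.vb
       VecCreate(PETSC_COMM_WORLD,&v);
       VecSetSizes(v,PETSC_DECIDE,N);
       VecSetFromOptions(v);     /* becomes VECMPI when run with -vec_type mpi */
.ve
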
201   Level: beginner
202 
203 .seealso: VecCreate(), VecSetType(), VecSetFromOptions(), VecCreateMPIWithArray(), VecType, VecCreateMPI()
204 M*/
205 
206 EXTERN_C_BEGIN
207 #undef __FUNCT__
208 #define __FUNCT__ "VecCreate_MPI"
209 PetscErrorCode PETSCVEC_DLLEXPORT VecCreate_MPI(Vec vv)
210 {
211   PetscErrorCode ierr;
212 
213   PetscFunctionBegin;
214   ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,0,0);CHKERRQ(ierr);
215   PetscFunctionReturn(0);
216 }
217 EXTERN_C_END
218 
219 #undef __FUNCT__
220 #define __FUNCT__ "VecCreateMPIWithArray"
221 /*@C
222    VecCreateMPIWithArray - Creates a parallel, array-style vector,
223    where the user provides the array space to store the vector values.
224 
225    Collective on MPI_Comm
226 
227    Input Parameters:
228 +  comm  - the MPI communicator to use
229 .  n     - local vector length, cannot be PETSC_DECIDE
230 .  N     - global vector length (or PETSC_DECIDE to have it calculated)
231 -  array - the user provided array to store the vector values
232 
233    Output Parameter:
234 .  vv - the vector
235 
236    Notes:
237    Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
238    same type as an existing vector.
239 
240    If the user-provided array is PETSC_NULL, then VecPlaceArray() can be used
241    at a later stage to SET the array for storing the vector values.
242 
243    PETSc does NOT free the array when the vector is destroyed via VecDestroy().
244    The user should not free the array until the vector is destroyed.
245 
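   A minimal usage sketch (lclsize is a caller-chosen local length, not part of this interface;
   error checking omitted):
.vb
       PetscScalar *array;
       Vec         v;
       PetscMalloc(lclsize*sizeof(PetscScalar),&array);
       VecCreateMPIWithArray(PETSC_COMM_WORLD,lclsize,PETSC_DECIDE,array,&v);
       /* ... use v ... */
       VecDestroy(v);
       PetscFree(array);     /* the array is NOT freed by VecDestroy() */
.ve
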
246    Level: intermediate
247 
248    Concepts: vectors^creating with array
249 
250 .seealso: VecCreateSeqWithArray(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(),
251           VecCreateMPI(), VecCreateGhostWithArray(), VecPlaceArray()
252 
253 @*/
254 PetscErrorCode PETSCVEC_DLLEXPORT VecCreateMPIWithArray(MPI_Comm comm,PetscInt n,PetscInt N,const PetscScalar array[],Vec *vv)
255 {
256   PetscErrorCode ierr;
257 
258   PetscFunctionBegin;
259   if (n == PETSC_DECIDE) {
260     SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
261   }
262   ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
263   ierr = VecCreate(comm,vv);CHKERRQ(ierr);
264   ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
265   ierr = VecCreate_MPI_Private(*vv,PETSC_FALSE,0,array);CHKERRQ(ierr);
266   PetscFunctionReturn(0);
267 }
268 
269 #undef __FUNCT__
270 #define __FUNCT__ "VecGhostGetLocalForm"
271 /*@
272     VecGhostGetLocalForm - Obtains the local ghosted representation of
273     a parallel vector created with VecCreateGhost().
274 
275     Not Collective
276 
277     Input Parameter:
278 .   g - the global vector. Vector must have been obtained with either
279         VecCreateGhost(), VecCreateGhostWithArray() or VecCreateSeq().
280 
281     Output Parameter:
282 .   l - the local (ghosted) representation
283 
284     Notes:
285     This routine does not actually update the ghost values, but rather it
286     returns a sequential vector that includes the locations for the ghost
287     values and their current values. The returned vector and the original
288     vector passed in share the same array that contains the actual vector data.
289 
290     One should call VecGhostRestoreLocalForm() or VecDestroy() once one is
291     finished using the object.
292 
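    A minimal access sketch (g is a ghosted vector from VecCreateGhost(); error checking omitted):
.vb
       Vec         l;
       PetscScalar *values;
       VecGhostGetLocalForm(g,&l);
       VecGetArray(l,&values);
       /* the owned entries come first in values[], followed by the ghost entries */
       VecRestoreArray(l,&values);
       VecGhostRestoreLocalForm(g,&l);
.ve
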
293     Level: advanced
294 
295    Concepts: vectors^ghost point access
296 
297 .seealso: VecCreateGhost(), VecGhostRestoreLocalForm(), VecCreateGhostWithArray()
298 
299 @*/
300 PetscErrorCode PETSCVEC_DLLEXPORT VecGhostGetLocalForm(Vec g,Vec *l)
301 {
302   PetscErrorCode ierr;
303   PetscTruth     isseq,ismpi;
304 
305   PetscFunctionBegin;
306   PetscValidHeaderSpecific(g,VEC_COOKIE,1);
307   PetscValidPointer(l,2);
308 
309   ierr = PetscTypeCompare((PetscObject)g,VECSEQ,&isseq);CHKERRQ(ierr);
310   ierr = PetscTypeCompare((PetscObject)g,VECMPI,&ismpi);CHKERRQ(ierr);
311   if (ismpi) {
312     Vec_MPI *v  = (Vec_MPI*)g->data;
313     if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
314     *l = v->localrep;
315   } else if (isseq) {
316     *l = g;
317   } else {
318     SETERRQ1(PETSC_ERR_ARG_WRONG,"Vector type %s does not have local representation",((PetscObject)g)->type_name);
319   }
320   ierr = PetscObjectReference((PetscObject)*l);CHKERRQ(ierr);
321   PetscFunctionReturn(0);
322 }
323 
324 #undef __FUNCT__
325 #define __FUNCT__ "VecGhostRestoreLocalForm"
326 /*@
327     VecGhostRestoreLocalForm - Restores the local ghosted representation of
328     a parallel vector obtained with VecGhostGetLocalForm().
329 
330     Not Collective
331 
332     Input Parameter:
333 +   g - the global vector
334 -   l - the local (ghosted) representation
335 
336     Notes:
337     This routine does not actually update the ghost values, but rather it
338     returns a sequential vector that includes the locations for the ghost values
339     and their current values.
340 
341     Level: advanced
342 
343 .seealso: VecCreateGhost(), VecGhostGetLocalForm(), VecCreateGhostWithArray()
344 @*/
345 PetscErrorCode PETSCVEC_DLLEXPORT VecGhostRestoreLocalForm(Vec g,Vec *l)
346 {
347   PetscFunctionBegin;
348   PetscObjectDereference((PetscObject)*l);
349   PetscFunctionReturn(0);
350 }
351 
352 #undef __FUNCT__
353 #define __FUNCT__ "VecGhostUpdateBegin"
354 /*@
355    VecGhostUpdateBegin - Begins the vector scatter to update the vector from
356    local representation to global or global representation to local.
357 
358    Collective on Vec
359 
360    Input Parameters:
361 +  g - the vector (obtained with VecCreateGhost() or VecDuplicate())
362 .  insertmode - one of ADD_VALUES or INSERT_VALUES
363 -  scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE
364 
365    Notes:
366    Use the following to update the ghost regions with correct values from the owning process
367 .vb
368        VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
369        VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
370 .ve
371 
372    Use the following to accumulate the ghost region values onto the owning processors
373 .vb
374        VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
375        VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
376 .ve
377 
378    To accumulate the ghost region values onto the owning processors and then update
379    the ghost regions correctly, call the latter followed by the former, i.e.,
380 .vb
381        VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
382        VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
383        VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
384        VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
385 .ve
386 
387    Level: advanced
388 
389 .seealso: VecCreateGhost(), VecGhostUpdateEnd(), VecGhostGetLocalForm(),
390           VecGhostRestoreLocalForm(),VecCreateGhostWithArray()
391 
392 @*/
393 PetscErrorCode PETSCVEC_DLLEXPORT VecGhostUpdateBegin(Vec g,InsertMode insertmode,ScatterMode scattermode)
394 {
395   Vec_MPI        *v;
396   PetscErrorCode ierr;
397 
398   PetscFunctionBegin;
399   PetscValidHeaderSpecific(g,VEC_COOKIE,1);
400 
401   v  = (Vec_MPI*)g->data;
402   if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
403   if (!v->localupdate) PetscFunctionReturn(0);
404 
405   if (scattermode == SCATTER_REVERSE) {
406     ierr = VecScatterBegin(v->localupdate,v->localrep,g,insertmode,scattermode);CHKERRQ(ierr);
407   } else {
408     ierr = VecScatterBegin(v->localupdate,g,v->localrep,insertmode,scattermode);CHKERRQ(ierr);
409   }
410   PetscFunctionReturn(0);
411 }
412 
413 #undef __FUNCT__
414 #define __FUNCT__ "VecGhostUpdateEnd"
415 /*@
416    VecGhostUpdateEnd - Ends the vector scatter to update the vector from
417    local representation to global or global representation to local.
418 
419    Collective on Vec
420 
421    Input Parameters:
422 +  g - the vector (obtained with VecCreateGhost() or VecDuplicate())
423 .  insertmode - one of ADD_VALUES or INSERT_VALUES
424 -  scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE
425 
426    Notes:
427 
428    Use the following to update the ghost regions with correct values from the owning process
429 .vb
430        VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
431        VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
432 .ve
433 
434    Use the following to accumulate the ghost region values onto the owning processors
435 .vb
436        VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
437        VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
438 .ve
439 
440    To accumulate the ghost region values onto the owning processors and then update
441    the ghost regions correctly, call the latter followed by the former, i.e.,
442 .vb
443        VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
444        VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
445        VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
446        VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
447 .ve
448 
449    Level: advanced
450 
451 .seealso: VecCreateGhost(), VecGhostUpdateBegin(), VecGhostGetLocalForm(),
452           VecGhostRestoreLocalForm(),VecCreateGhostWithArray()
453 
454 @*/
455 PetscErrorCode PETSCVEC_DLLEXPORT VecGhostUpdateEnd(Vec g,InsertMode insertmode,ScatterMode scattermode)
456 {
457   Vec_MPI        *v;
458   PetscErrorCode ierr;
459 
460   PetscFunctionBegin;
461   PetscValidHeaderSpecific(g,VEC_COOKIE,1);
462 
463   v  = (Vec_MPI*)g->data;
464   if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
465   if (!v->localupdate) PetscFunctionReturn(0);
466 
467   if (scattermode == SCATTER_REVERSE) {
468     ierr = VecScatterEnd(v->localupdate,v->localrep,g,insertmode,scattermode);CHKERRQ(ierr);
469   } else {
470     ierr = VecScatterEnd(v->localupdate,g,v->localrep,insertmode,scattermode);CHKERRQ(ierr);
471   }
472   PetscFunctionReturn(0);
473 }
474 
475 #undef __FUNCT__
476 #define __FUNCT__ "VecCreateGhostWithArray"
477 /*@C
478    VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
479    the caller allocates the array space.
480 
481    Collective on MPI_Comm
482 
483    Input Parameters:
484 +  comm - the MPI communicator to use
485 .  n - local vector length
486 .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
487 .  nghost - number of local ghost points
488 .  ghosts - global indices of ghost points (or PETSC_NULL if not needed)
489 -  array - the space to store the vector values (must have length at least n + nghost)
490 
491    Output Parameter:
492 .  vv - the global vector representation (without ghost points as part of vector)
493 
494    Notes:
495    Use VecGhostGetLocalForm() to access the local, ghosted representation
496    of the vector.
497 
498    This also automatically sets the ISLocalToGlobalMapping() for this vector.
499 
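   A minimal sketch (the caller-provided array holds the owned entries followed by the ghost
   entries; error checking omitted):
.vb
       PetscScalar *array;
       PetscMalloc((n+nghost)*sizeof(PetscScalar),&array);
       VecCreateGhostWithArray(PETSC_COMM_WORLD,n,PETSC_DECIDE,nghost,ghosts,array,&vv);
.ve
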
500    Level: advanced
501 
502    Concepts: vectors^creating with array
503    Concepts: vectors^ghosted
504 
505 .seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
506           VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
507           VecCreateGhostBlock(), VecCreateGhostBlockWithArray()
508 
509 @*/
510 PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostWithArray(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
511 {
512   PetscErrorCode         ierr;
513   Vec_MPI                *w;
514   PetscScalar            *larray;
515   IS                     from,to;
516   ISLocalToGlobalMapping ltog;
517   PetscInt               rstart,i,*indices;
518 
519   PetscFunctionBegin;
520   *vv = 0;
521 
522   if (n == PETSC_DECIDE)      SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
523   if (nghost == PETSC_DECIDE) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
524   if (nghost < 0)             SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
525   ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
526   /* Create global representation */
527   ierr = VecCreate(comm,vv);CHKERRQ(ierr);
528   ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
529   ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost,array);CHKERRQ(ierr);
530   w    = (Vec_MPI *)(*vv)->data;
531   /* Create local representation */
532   ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
533   ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
534   ierr = PetscLogObjectParent(*vv,w->localrep);CHKERRQ(ierr);
535   ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);
536 
537   /*
538        Create scatter context for scattering (updating) ghost values
539   */
540   ierr = ISCreateGeneral(comm,nghost,ghosts,&from);CHKERRQ(ierr);
541   ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
542   ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
543   ierr = PetscLogObjectParent(*vv,w->localupdate);CHKERRQ(ierr);
544   ierr = ISDestroy(to);CHKERRQ(ierr);
545   ierr = ISDestroy(from);CHKERRQ(ierr);
546 
547   /* set local to global mapping for ghosted vector */
548   ierr = PetscMalloc((n+nghost)*sizeof(PetscInt),&indices);CHKERRQ(ierr);
549   ierr = VecGetOwnershipRange(*vv,&rstart,PETSC_NULL);CHKERRQ(ierr);
550   for (i=0; i<n; i++) {
551     indices[i] = rstart + i;
552   }
553   for (i=0; i<nghost; i++) {
554     indices[n+i] = ghosts[i];
555   }
556   ierr = ISLocalToGlobalMappingCreate(comm,n+nghost,indices,&ltog);CHKERRQ(ierr);
557   ierr = PetscFree(indices);CHKERRQ(ierr);
558   ierr = VecSetLocalToGlobalMapping(*vv,ltog);CHKERRQ(ierr);
559   ierr = ISLocalToGlobalMappingDestroy(ltog);CHKERRQ(ierr);
561   PetscFunctionReturn(0);
562 }
563 
564 #undef __FUNCT__
565 #define __FUNCT__ "VecCreateGhost"
566 /*@
567    VecCreateGhost - Creates a parallel vector with ghost padding on each processor.
568 
569    Collective on MPI_Comm
570 
571    Input Parameters:
572 +  comm - the MPI communicator to use
573 .  n - local vector length
574 .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
575 .  nghost - number of local ghost points
576 -  ghosts - global indices of ghost points
577 
578    Output Parameter:
579 .  vv - the global vector representation (without ghost points as part of vector)
580 
581    Notes:
582    Use VecGhostGetLocalForm() to access the local, ghosted representation
583    of the vector.
584 
585    This also automatically sets the ISLocalToGlobalMapping() for this vector.
586 
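   A minimal usage sketch (ghosts[] holds the global indices of the nghost off-process entries
   needed locally; error checking omitted):
.vb
       VecCreateGhost(PETSC_COMM_WORLD,n,PETSC_DECIDE,nghost,ghosts,&vv);
       /* ... VecSetValues()/VecAssemblyBegin()/VecAssemblyEnd() on vv ... */
       VecGhostUpdateBegin(vv,INSERT_VALUES,SCATTER_FORWARD);
       VecGhostUpdateEnd(vv,INSERT_VALUES,SCATTER_FORWARD);
       VecGhostGetLocalForm(vv,&lv);
       /* ... read owned and ghost entries through lv ... */
       VecGhostRestoreLocalForm(vv,&lv);
.ve
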
587    Level: advanced
588 
589    Concepts: vectors^ghosted
590 
591 .seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
592           VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
593           VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
594           VecCreateGhostBlock(), VecCreateGhostBlockWithArray()
595 
596 @*/
597 PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhost(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
598 {
599   PetscErrorCode ierr;
600 
601   PetscFunctionBegin;
602   ierr = VecCreateGhostWithArray(comm,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
603   PetscFunctionReturn(0);
604 }
605 
606 #undef __FUNCT__
607 #define __FUNCT__ "VecDuplicate_MPI"
608 PetscErrorCode VecDuplicate_MPI(Vec win,Vec *v)
609 {
610   PetscErrorCode ierr;
611   Vec_MPI        *vw,*w = (Vec_MPI *)win->data;
612   PetscScalar    *array;
613 
614   PetscFunctionBegin;
615   ierr = VecCreate(((PetscObject)win)->comm,v);CHKERRQ(ierr);
616   ierr = VecSetSizes(*v,win->map.n,win->map.N);CHKERRQ(ierr);
617   ierr = VecCreate_MPI_Private(*v,PETSC_TRUE,w->nghost,0);CHKERRQ(ierr);
618   vw   = (Vec_MPI *)(*v)->data;
619   ierr = PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps));CHKERRQ(ierr);
620 
621   /* save local representation of the parallel vector (and scatter) if it exists */
622   if (w->localrep) {
623     ierr = VecGetArray(*v,&array);CHKERRQ(ierr);
624     ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,win->map.n+w->nghost,array,&vw->localrep);CHKERRQ(ierr);
625     ierr = PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps));CHKERRQ(ierr);
626     ierr = VecRestoreArray(*v,&array);CHKERRQ(ierr);
627     ierr = PetscLogObjectParent(*v,vw->localrep);CHKERRQ(ierr);
628     vw->localupdate = w->localupdate;
629     if (vw->localupdate) {
630       ierr = PetscObjectReference((PetscObject)vw->localupdate);CHKERRQ(ierr);
631     }
632   }
633 
634   /* New vector should inherit stashing property of parent */
635   (*v)->stash.donotstash = win->stash.donotstash;
636   (*v)->stash.ignorenegidx = win->stash.ignorenegidx;
637 
638   ierr = PetscOListDuplicate(((PetscObject)win)->olist,&((PetscObject)(*v))->olist);CHKERRQ(ierr);
639   ierr = PetscFListDuplicate(((PetscObject)win)->qlist,&((PetscObject)(*v))->qlist);CHKERRQ(ierr);
640   if (win->mapping) {
641     ierr = PetscObjectReference((PetscObject)win->mapping);CHKERRQ(ierr);
642     (*v)->mapping = win->mapping;
643   }
644   if (win->bmapping) {
645     ierr = PetscObjectReference((PetscObject)win->bmapping);CHKERRQ(ierr);
646     (*v)->bmapping = win->bmapping;
647   }
648   (*v)->map.bs    = win->map.bs;
649   (*v)->bstash.bs = win->bstash.bs;
650 
651   PetscFunctionReturn(0);
652 }
653 
654 /* ------------------------------------------------------------------------------------------*/
655 #undef __FUNCT__
656 #define __FUNCT__ "VecCreateGhostBlockWithArray"
657 /*@C
658    VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
659    the caller allocates the array space. Indices in the ghost region are based on blocks.
660 
661    Collective on MPI_Comm
662 
663    Input Parameters:
664 +  comm - the MPI communicator to use
665 .  bs - block size
666 .  n - local vector length
667 .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
668 .  nghost - number of local ghost blocks
669 .  ghosts - global indices of ghost blocks (or PETSC_NULL if not needed)
670 -  array - the space to store the vector values (must have length at least n + nghost*bs)
671 
672    Output Parameter:
673 .  vv - the global vector representation (without ghost points as part of vector)
674 
675    Notes:
676    Use VecGhostGetLocalForm() to access the local, ghosted representation
677    of the vector.
678 
679    n is the local vector size (total local size not the number of blocks) while nghost
680    is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
681    portion is bs*nghost
682 
683    Level: advanced
684 
685    Concepts: vectors^creating ghosted
686    Concepts: vectors^creating with array
687 
688 .seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
689           VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
690           VecCreateGhostWithArray(), VecCreateGhostBlock()
691 
692 @*/
693 PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostBlockWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
694 {
695   PetscErrorCode ierr;
696   Vec_MPI        *w;
697   PetscScalar    *larray;
698   IS             from,to;
699 
700   PetscFunctionBegin;
701   *vv = 0;
702 
703   if (n == PETSC_DECIDE)      SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
704   if (nghost == PETSC_DECIDE) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
705   if (nghost < 0)             SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
706   ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
707   /* Create global representation */
708   ierr = VecCreate(comm,vv);CHKERRQ(ierr);
709   ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
710   ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost*bs,array);CHKERRQ(ierr);
711   ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
712   w    = (Vec_MPI *)(*vv)->data;
713   /* Create local representation */
714   ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
715   ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,n+bs*nghost,larray,&w->localrep);CHKERRQ(ierr);
716   ierr = VecSetBlockSize(w->localrep,bs);CHKERRQ(ierr);
717   ierr = PetscLogObjectParent(*vv,w->localrep);CHKERRQ(ierr);
718   ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);
719 
720   /*
721        Create scatter context for scattering (updating) ghost values
722   */
723   ierr = ISCreateBlock(comm,bs,nghost,ghosts,&from);CHKERRQ(ierr);
724   ierr = ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);CHKERRQ(ierr);
725   ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
726   ierr = PetscLogObjectParent(*vv,w->localupdate);CHKERRQ(ierr);
727   ierr = ISDestroy(to);CHKERRQ(ierr);
728   ierr = ISDestroy(from);CHKERRQ(ierr);
729 
730   PetscFunctionReturn(0);
731 }
732 
733 #undef __FUNCT__
734 #define __FUNCT__ "VecCreateGhostBlock"
735 /*@
736    VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
737         The indexing of the ghost points is done with blocks.
738 
739    Collective on MPI_Comm
740 
741    Input Parameters:
742 +  comm - the MPI communicator to use
743 .  bs - the block size
744 .  n - local vector length
745 .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
746 .  nghost - number of local ghost blocks
747 -  ghosts - global indices of ghost blocks
748 
749    Output Parameter:
750 .  vv - the global vector representation (without ghost points as part of vector)
751 
752    Notes:
753    Use VecGhostGetLocalForm() to access the local, ghosted representation
754    of the vector.
755 
756    n is the local vector size (total local size not the number of blocks) while nghost
757    is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
758    portion is bs*nghost
759 
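   A minimal sketch (each entry of ghosts[] names a ghost block of bs entries, so n should be a
   multiple of bs; error checking omitted):
.vb
       VecCreateGhostBlock(PETSC_COMM_WORLD,bs,n,PETSC_DECIDE,nghost,ghosts,&vv);
       /* the local form from VecGhostGetLocalForm() then has n + bs*nghost entries */
.ve
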
760    Level: advanced
761 
762    Concepts: vectors^ghosted
763 
764 .seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
765           VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
766           VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecCreateGhostBlockWithArray()
767 
768 @*/
769 PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostBlock(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
770 {
771   PetscErrorCode ierr;
772 
773   PetscFunctionBegin;
774   ierr = VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
775   PetscFunctionReturn(0);
776 }
777 
778 /*
779     These introduce a ghosted vector where the ghosting is determined by the call to
780   VecSetLocalToGlobalMapping()
781 */
782 
783 #undef __FUNCT__
784 #define __FUNCT__ "VecSetLocalToGlobalMapping_FETI"
785 PetscErrorCode VecSetLocalToGlobalMapping_FETI(Vec vv,ISLocalToGlobalMapping map)
786 {
787   PetscErrorCode ierr;
788   Vec_MPI        *v = (Vec_MPI *)vv->data;
789 
790   PetscFunctionBegin;
791   v->nghost = map->n - vv->map.n;
792 
793   /* we need to make longer the array space that was allocated when the vector was created */
794   ierr     = PetscFree(v->array_allocated);CHKERRQ(ierr);
795   ierr     = PetscMalloc(map->n*sizeof(PetscScalar),&v->array_allocated);CHKERRQ(ierr);
796   v->array = v->array_allocated;
797 
798   /* Create local representation */
799   ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,map->n,v->array,&v->localrep);CHKERRQ(ierr);
800   ierr = PetscLogObjectParent(vv,v->localrep);CHKERRQ(ierr);
801   PetscFunctionReturn(0);
802 }
803 
804 
805 #undef __FUNCT__
806 #define __FUNCT__ "VecSetValuesLocal_FETI"
807 PetscErrorCode VecSetValuesLocal_FETI(Vec vv,PetscInt n,const PetscInt *ix,const PetscScalar *values,InsertMode mode)
808 {
809   PetscErrorCode ierr;
810   Vec_MPI        *v = (Vec_MPI *)vv->data;
811 
812   PetscFunctionBegin;
813   ierr = VecSetValues(v->localrep,n,ix,values,mode);CHKERRQ(ierr);
814   PetscFunctionReturn(0);
815 }
816 
817 EXTERN_C_BEGIN
818 #undef __FUNCT__
819 #define __FUNCT__ "VecCreate_FETI"
820 PetscErrorCode PETSCVEC_DLLEXPORT VecCreate_FETI(Vec vv)
821 {
822   PetscErrorCode ierr;
823 
824   PetscFunctionBegin;
825   ierr = VecSetType(vv,VECMPI);CHKERRQ(ierr);
826 
827   /* overwrite the functions to handle setting values locally */
828   vv->ops->setlocaltoglobalmapping = VecSetLocalToGlobalMapping_FETI;
829   vv->ops->setvalueslocal          = VecSetValuesLocal_FETI;
830   vv->ops->assemblybegin           = 0;
831   vv->ops->assemblyend             = 0;
832   vv->ops->setvaluesblocked        = 0;
834 
835   PetscFunctionReturn(0);
836 }
837 EXTERN_C_END
838 
839 
840 
841 
842 
843 
844 
845 
846 
847