xref: /petsc/src/vec/vec/impls/mpi/pbvec.c (revision 92ff6ae828fe67abd231a335eb38900b8f8c5b4a)
1 #define PETSCVEC_DLL
2 /*
3    This file contains routines for Parallel vector operations.
4  */
5 #include "src/vec/vec/impls/mpi/pvecimpl.h"   /*I  "petscvec.h"   I*/
6 
/*
       Note this code is very similar to VecPublish_Seq()
*/
#undef __FUNCT__
#define __FUNCT__ "VecPublish_MPI"
/* Publishing hook installed on MPI vectors (see VecCreate_MPI_Private());
   currently a no-op placeholder. */
static PetscErrorCode VecPublish_MPI(PetscObject obj)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
17 
18 #undef __FUNCT__
19 #define __FUNCT__ "VecDot_MPI"
20 PetscErrorCode VecDot_MPI(Vec xin,Vec yin,PetscScalar *z)
21 {
22   PetscScalar    sum,work;
23   PetscErrorCode ierr;
24 
25   PetscFunctionBegin;
26   ierr = VecDot_Seq(xin,yin,&work);CHKERRQ(ierr);
27   ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,PetscSum_Op,xin->comm);CHKERRQ(ierr);
28   *z = sum;
29   PetscFunctionReturn(0);
30 }
31 
32 #undef __FUNCT__
33 #define __FUNCT__ "VecTDot_MPI"
34 PetscErrorCode VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z)
35 {
36   PetscScalar    sum,work;
37   PetscErrorCode ierr;
38 
39   PetscFunctionBegin;
40   ierr = VecTDot_Seq(xin,yin,&work);CHKERRQ(ierr);
41   ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,PetscSum_Op,xin->comm);CHKERRQ(ierr);
42   *z   = sum;
43   PetscFunctionReturn(0);
44 }
45 
46 #undef __FUNCT__
47 #define __FUNCT__ "VecSetOption_MPI"
48 PetscErrorCode VecSetOption_MPI(Vec v,VecOption op)
49 {
50   PetscFunctionBegin;
51   if (op == VEC_IGNORE_OFF_PROC_ENTRIES) {
52     v->stash.donotstash = PETSC_TRUE;
53   } else if (op == VEC_TREAT_OFF_PROC_ENTRIES) {
54     v->stash.donotstash = PETSC_FALSE;
55   } else if (op == VEC_IGNORE_NEGATIVE_INDICES) {
56     v->stash.ignorenegidx = PETSC_TRUE;
57   } else if (op == VEC_TREAT_NEGATIVE_INDICES) {
58     v->stash.ignorenegidx = PETSC_FALSE;
59   }
60   PetscFunctionReturn(0);
61 }
62 
63 EXTERN PetscErrorCode VecDuplicate_MPI(Vec,Vec *);
64 EXTERN_C_BEGIN
65 EXTERN PetscErrorCode VecView_MPI_Draw(Vec,PetscViewer);
66 EXTERN_C_END
67 
#undef __FUNCT__
#define __FUNCT__ "VecPlaceArray_MPI"
/*
   VecPlaceArray_MPI - Temporarily replaces the vector's data array with a
   user-supplied one.  The previous array is stashed in v->unplacedarray so
   that VecResetArray() can restore it; a second placement without an
   intervening reset is an error.  If the vector has a ghosted local
   representation, the same array is placed there too so both views stay
   consistent.
*/
PetscErrorCode VecPlaceArray_MPI(Vec vin,const PetscScalar *a)
{
  PetscErrorCode ierr;
  Vec_MPI        *v = (Vec_MPI *)vin->data;

  PetscFunctionBegin;
  if (v->unplacedarray) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
  v->unplacedarray = v->array;  /* save previous array so reset can bring it back */
  v->array = (PetscScalar *)a;
  if (v->localrep) {
    /* keep the ghosted local view pointing at the same storage */
    ierr = VecPlaceArray(v->localrep,a);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
84 
85 EXTERN PetscErrorCode VecLoad_Binary(PetscViewer, VecType, Vec*);
86 EXTERN PetscErrorCode VecGetValues_MPI(Vec,PetscInt,const PetscInt [],PetscScalar []);
87 
/*
   Function table for VECMPI.  Entries are positional (they must line up
   with struct _VecOps): operations that require interprocess communication
   use the _MPI kernels, while purely local operations reuse the _Seq
   kernels directly on the owned portion of the vector's array.
*/
static struct _VecOps DvOps = { VecDuplicate_MPI, /* 1 */
            VecDuplicateVecs_Default,
            VecDestroyVecs_Default,
            VecDot_MPI,
            VecMDot_MPI,
            VecNorm_MPI,
            VecTDot_MPI,
            VecMTDot_MPI,
            VecScale_Seq,
            VecCopy_Seq, /* 10 */
            VecSet_Seq,
            VecSwap_Seq,
            VecAXPY_Seq,
            VecAXPBY_Seq,
            VecMAXPY_Seq,
            VecAYPX_Seq,
            VecWAXPY_Seq,
            VecPointwiseMult_Seq,
            VecPointwiseDivide_Seq,
            VecSetValues_MPI, /* 20 */
            VecAssemblyBegin_MPI,
            VecAssemblyEnd_MPI,
            VecGetArray_Seq,
            VecGetSize_MPI,
            VecGetSize_Seq,
            VecRestoreArray_Seq,
            VecMax_MPI,
            VecMin_MPI,
            VecSetRandom_Seq,
            VecSetOption_MPI,
            VecSetValuesBlocked_MPI,
            VecDestroy_MPI,
            VecView_MPI,
            VecPlaceArray_MPI,
            VecReplaceArray_Seq,
            VecDot_Seq,
            VecTDot_Seq,
            VecNorm_Seq,
            VecMDot_Seq,
            VecMTDot_Seq,
            VecLoadIntoVector_Default,
            VecReciprocal_Default,
            0, /* VecViewNative... */
            VecConjugate_Seq,
            0,
            0,
            VecResetArray_Seq,
            0,
            VecMaxPointwiseDivide_Seq,
            VecLoad_Binary,
            VecPointwiseMax_Seq,
            VecPointwiseMaxAbs_Seq,
            VecPointwiseMin_Seq,
            VecGetValues_MPI};
142 
#undef __FUNCT__
#define __FUNCT__ "VecCreate_MPI_Private"
/*
    VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
    VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
    VecCreateGhostWithArray(), VecDuplicate_MPI(), and VecDuplicate_Shared().

    If array is non-null it is adopted (not copied) as the vector's storage and
    is not freed on destroy; otherwise storage for v->map.n+nghost entries is
    allocated here and owned by the vector.
*/
PetscErrorCode VecCreate_MPI_Private(Vec v,PetscInt nghost,const PetscScalar array[])
{
  Vec_MPI        *s;
  PetscErrorCode ierr;

  PetscFunctionBegin;

  v->bops->publish   = VecPublish_MPI;
  ierr = PetscLogObjectMemory(v,sizeof(Vec_MPI) + (v->map.n+nghost+1)*sizeof(PetscScalar));CHKERRQ(ierr);
  ierr           = PetscNew(Vec_MPI,&s);CHKERRQ(ierr);
  ierr           = PetscMemcpy(v->ops,&DvOps,sizeof(DvOps));CHKERRQ(ierr);
  v->data        = (void*)s;
  s->nghost      = nghost;
  v->mapping     = 0;
  v->bmapping    = 0;
  v->petscnative = PETSC_TRUE;

  /* default block size of 1 if the caller never set one */
  if (v->map.bs == -1) v->map.bs = 1;
  ierr = PetscMapSetUp(&v->map);CHKERRQ(ierr);
  if (array) {
    /* adopt the caller's array; array_allocated == 0 means we must not free it */
    s->array           = (PetscScalar *)array;
    s->array_allocated = 0;
  } else {
    PetscInt n         = v->map.n+nghost;
    ierr               = PetscMalloc(n*sizeof(PetscScalar),&s->array);CHKERRQ(ierr);
    s->array_allocated = s->array;
    /* only the owned portion is zeroed; the nghost trailing entries are left
       uninitialized until a ghost update fills them */
    ierr               = PetscMemzero(s->array,v->map.n*sizeof(PetscScalar));CHKERRQ(ierr);
  }

  /* By default parallel vectors do not have local representation */
  s->localrep    = 0;
  s->localupdate = 0;

  v->stash.insertmode  = NOT_SET_VALUES;
  /* create the stashes. The block-size for bstash is set later when
     VecSetValuesBlocked is called.
  */
  ierr = VecStashCreate_Private(v->comm,1,&v->stash);CHKERRQ(ierr);
  ierr = VecStashCreate_Private(v->comm,v->map.bs,&v->bstash);CHKERRQ(ierr);

#if defined(PETSC_HAVE_MATLAB_ENGINE)
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEnginePut_C","VecMatlabEnginePut_Default",VecMatlabEnginePut_Default);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEngineGet_C","VecMatlabEngineGet_Default",VecMatlabEngineGet_Default);CHKERRQ(ierr);
#endif
  ierr = PetscObjectChangeTypeName((PetscObject)v,VECMPI);CHKERRQ(ierr);
  ierr = PetscPublishAll(v);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
198 
199 /*MC
200    VECMPI - VECMPI = "mpi" - The basic parallel vector
201 
202    Options Database Keys:
203 . -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions()
204 
205   Level: beginner
206 
.seealso: VecCreate(), VecSetType(), VecSetFromOptions(), VecCreateMPIWithArray(), VECMPI, VecType, VecCreateMPI()
208 M*/
209 
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "VecCreate_MPI"
/* Type-registration constructor for VECMPI: no ghost points, storage
   allocated internally by VecCreate_MPI_Private(). */
PetscErrorCode PETSCVEC_DLLEXPORT VecCreate_MPI(Vec vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreate_MPI_Private(vv,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END
222 
223 #undef __FUNCT__
224 #define __FUNCT__ "VecCreateMPIWithArray"
225 /*@C
226    VecCreateMPIWithArray - Creates a parallel, array-style vector,
227    where the user provides the array space to store the vector values.
228 
229    Collective on MPI_Comm
230 
231    Input Parameters:
232 +  comm  - the MPI communicator to use
233 .  n     - local vector length, cannot be PETSC_DECIDE
234 .  N     - global vector length (or PETSC_DECIDE to have calculated)
235 -  array - the user provided array to store the vector values
236 
237    Output Parameter:
238 .  vv - the vector
239 
240    Notes:
241    Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
242    same type as an existing vector.
243 
244    If the user-provided array is PETSC_NULL, then VecPlaceArray() can be used
245    at a later stage to SET the array for storing the vector values.
246 
247    PETSc does NOT free the array when the vector is destroyed via VecDestroy().
248    The user should not free the array until the vector is destroyed.
249 
250    Level: intermediate
251 
252    Concepts: vectors^creating with array
253 
254 .seealso: VecCreateSeqWithArray(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(),
255           VecCreateMPI(), VecCreateGhostWithArray(), VecPlaceArray()
256 
257 @*/
258 PetscErrorCode PETSCVEC_DLLEXPORT VecCreateMPIWithArray(MPI_Comm comm,PetscInt n,PetscInt N,const PetscScalar array[],Vec *vv)
259 {
260   PetscErrorCode ierr;
261 
262   PetscFunctionBegin;
263   if (n == PETSC_DECIDE) {
264     SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
265   }
266   ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
267   ierr = VecCreate(comm,vv);CHKERRQ(ierr);
268   ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
269   ierr = VecCreate_MPI_Private(*vv,0,array);CHKERRQ(ierr);
270   PetscFunctionReturn(0);
271 }
272 
273 #undef __FUNCT__
274 #define __FUNCT__ "VecGhostGetLocalForm"
275 /*@
276     VecGhostGetLocalForm - Obtains the local ghosted representation of
277     a parallel vector created with VecCreateGhost().
278 
279     Not Collective
280 
281     Input Parameter:
282 .   g - the global vector. Vector must be have been obtained with either
283         VecCreateGhost(), VecCreateGhostWithArray() or VecCreateSeq().
284 
285     Output Parameter:
286 .   l - the local (ghosted) representation
287 
288     Notes:
289     This routine does not actually update the ghost values, but rather it
290     returns a sequential vector that includes the locations for the ghost
291     values and their current values. The returned vector and the original
292     vector passed in share the same array that contains the actual vector data.
293 
294     One should call VecGhostRestoreLocalForm() or VecDestroy() once one is
295     finished using the object.
296 
297     Level: advanced
298 
299    Concepts: vectors^ghost point access
300 
301 .seealso: VecCreateGhost(), VecGhostRestoreLocalForm(), VecCreateGhostWithArray()
302 
303 @*/
304 PetscErrorCode PETSCVEC_DLLEXPORT VecGhostGetLocalForm(Vec g,Vec *l)
305 {
306   PetscErrorCode ierr;
307   PetscTruth     isseq,ismpi;
308 
309   PetscFunctionBegin;
310   PetscValidHeaderSpecific(g,VEC_COOKIE,1);
311   PetscValidPointer(l,2);
312 
313   ierr = PetscTypeCompare((PetscObject)g,VECSEQ,&isseq);CHKERRQ(ierr);
314   ierr = PetscTypeCompare((PetscObject)g,VECMPI,&ismpi);CHKERRQ(ierr);
315   if (ismpi) {
316     Vec_MPI *v  = (Vec_MPI*)g->data;
317     if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
318     *l = v->localrep;
319   } else if (isseq) {
320     *l = g;
321   } else {
322     SETERRQ1(PETSC_ERR_ARG_WRONG,"Vector type %s does not have local representation",g->type_name);
323   }
324   ierr = PetscObjectReference((PetscObject)*l);CHKERRQ(ierr);
325   PetscFunctionReturn(0);
326 }
327 
328 #undef __FUNCT__
329 #define __FUNCT__ "VecGhostRestoreLocalForm"
330 /*@
331     VecGhostRestoreLocalForm - Restores the local ghosted representation of
332     a parallel vector obtained with VecGhostGetLocalForm().
333 
334     Not Collective
335 
336     Input Parameter:
337 +   g - the global vector
338 -   l - the local (ghosted) representation
339 
340     Notes:
341     This routine does not actually update the ghost values, but rather it
342     returns a sequential vector that includes the locations for the ghost values
343     and their current values.
344 
345     Level: advanced
346 
347 .seealso: VecCreateGhost(), VecGhostGetLocalForm(), VecCreateGhostWithArray()
348 @*/
349 PetscErrorCode PETSCVEC_DLLEXPORT VecGhostRestoreLocalForm(Vec g,Vec *l)
350 {
351   PetscFunctionBegin;
352   PetscObjectDereference((PetscObject)*l);
353   PetscFunctionReturn(0);
354 }
355 
#undef __FUNCT__
#define __FUNCT__ "VecGhostUpdateBegin"
/*@
   VecGhostUpdateBegin - Begins the vector scatter to update the vector from
   local representation to global or global representation to local.

   Collective on Vec

   Input Parameters:
+  g - the vector (obtained with VecCreateGhost() or VecDuplicate())
.  insertmode - one of ADD_VALUES or INSERT_VALUES
-  scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE

   Notes:
   Use the following to update the ghost regions with correct values from the owning process
.vb
       VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
       VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
.ve

   Use the following to accumulate the ghost region values onto the owning processors
.vb
       VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
.ve

   To accumulate the ghost region values onto the owning processors and then update
   the ghost regions correctly, call the latter followed by the former, i.e.,
.vb
       VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
       VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
.ve

   Level: advanced

.seealso: VecCreateGhost(), VecGhostUpdateEnd(), VecGhostGetLocalForm(),
          VecGhostRestoreLocalForm(),VecCreateGhostWithArray()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecGhostUpdateBegin(Vec g,InsertMode insertmode,ScatterMode scattermode)
{
  Vec_MPI        *v;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(g,VEC_COOKIE,1);

  v  = (Vec_MPI*)g->data;
  if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
  /* no scatter context means nothing to communicate */
  if (!v->localupdate) PetscFunctionReturn(0);

  if (scattermode == SCATTER_REVERSE) {
    /* reverse: ghost region (local rep) back to the owning processes */
    ierr = VecScatterBegin(v->localrep,g,insertmode,scattermode,v->localupdate);CHKERRQ(ierr);
  } else {
    /* forward: owner values out to the ghost region */
    ierr = VecScatterBegin(g,v->localrep,insertmode,scattermode,v->localupdate);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
416 
#undef __FUNCT__
#define __FUNCT__ "VecGhostUpdateEnd"
/*@
   VecGhostUpdateEnd - End the vector scatter to update the vector from
   local representation to global or global representation to local.

   Collective on Vec

   Input Parameters:
+  g - the vector (obtained with VecCreateGhost() or VecDuplicate())
.  insertmode - one of ADD_VALUES or INSERT_VALUES
-  scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE

   Notes:

   Use the following to update the ghost regions with correct values from the owning process
.vb
       VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
       VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
.ve

   Use the following to accumulate the ghost region values onto the owning processors
.vb
       VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
.ve

   To accumulate the ghost region values onto the owning processors and then update
   the ghost regions correctly, call the latter followed by the former, i.e.,
.vb
       VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
       VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
.ve

   Level: advanced

.seealso: VecCreateGhost(), VecGhostUpdateBegin(), VecGhostGetLocalForm(),
          VecGhostRestoreLocalForm(),VecCreateGhostWithArray()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecGhostUpdateEnd(Vec g,InsertMode insertmode,ScatterMode scattermode)
{
  Vec_MPI        *v;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(g,VEC_COOKIE,1);

  v  = (Vec_MPI*)g->data;
  if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
  /* no scatter context means nothing to communicate */
  if (!v->localupdate) PetscFunctionReturn(0);

  /* direction must match the corresponding VecGhostUpdateBegin() call */
  if (scattermode == SCATTER_REVERSE) {
    ierr = VecScatterEnd(v->localrep,g,insertmode,scattermode,v->localupdate);CHKERRQ(ierr);
  } else {
    ierr = VecScatterEnd(g,v->localrep,insertmode,scattermode,v->localupdate);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
478 
479 #undef __FUNCT__
480 #define __FUNCT__ "VecCreateGhostWithArray"
481 /*@C
482    VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
483    the caller allocates the array space.
484 
485    Collective on MPI_Comm
486 
487    Input Parameters:
488 +  comm - the MPI communicator to use
489 .  n - local vector length
490 .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
491 .  nghost - number of local ghost points
492 .  ghosts - global indices of ghost points (or PETSC_NULL if not needed)
493 -  array - the space to store the vector values (as long as n + nghost)
494 
495    Output Parameter:
496 .  vv - the global vector representation (without ghost points as part of vector)
497 
498    Notes:
499    Use VecGhostGetLocalForm() to access the local, ghosted representation
500    of the vector.
501 
502    This also automatically sets the ISLocalToGlobalMapping() for this vector.
503 
504    Level: advanced
505 
506    Concepts: vectors^creating with array
507    Concepts: vectors^ghosted
508 
509 .seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
510           VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
511           VecCreateGhostBlock(), VecCreateGhostBlockWithArray()
512 
513 @*/
514 PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostWithArray(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
515 {
516   PetscErrorCode         ierr;
517   Vec_MPI                *w;
518   PetscScalar            *larray;
519   IS                     from,to;
520   ISLocalToGlobalMapping ltog;
521   PetscInt               rstart,i,*indices;
522 
523   PetscFunctionBegin;
524   *vv = 0;
525 
526   if (n == PETSC_DECIDE)      SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
527   if (nghost == PETSC_DECIDE) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
528   if (nghost < 0)             SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
529   ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
530   /* Create global representation */
531   ierr = VecCreate(comm,vv);CHKERRQ(ierr);
532   ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
533   ierr = VecCreate_MPI_Private(*vv,nghost,array);CHKERRQ(ierr);
534   w    = (Vec_MPI *)(*vv)->data;
535   /* Create local representation */
536   ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
537   ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
538   ierr = PetscLogObjectParent(*vv,w->localrep);CHKERRQ(ierr);
539   ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);
540 
541   /*
542        Create scatter context for scattering (updating) ghost values
543   */
544   ierr = ISCreateGeneral(comm,nghost,ghosts,&from);CHKERRQ(ierr);
545   ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
546   ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
547   ierr = PetscLogObjectParent(*vv,w->localupdate);CHKERRQ(ierr);
548   ierr = ISDestroy(to);CHKERRQ(ierr);
549   ierr = ISDestroy(from);CHKERRQ(ierr);
550 
551   /* set local to global mapping for ghosted vector */
552   ierr = PetscMalloc((n+nghost)*sizeof(PetscInt),&indices);CHKERRQ(ierr);
553   ierr = VecGetOwnershipRange(*vv,&rstart,PETSC_NULL);CHKERRQ(ierr);
554   for (i=0; i<n; i++) {
555     indices[i] = rstart + i;
556   }
557   for (i=0; i<nghost; i++) {
558     indices[n+i] = ghosts[i];
559   }
560   ierr = ISLocalToGlobalMappingCreate(comm,n+nghost,indices,&ltog);CHKERRQ(ierr);
561   ierr = PetscFree(indices);CHKERRQ(ierr);
562   ierr = VecSetLocalToGlobalMapping(*vv,ltog);CHKERRQ(ierr);
563   ierr = ISLocalToGlobalMappingDestroy(ltog);CHKERRQ(ierr);
564   ierr = PetscFree(indices);CHKERRQ(ierr);
565   PetscFunctionReturn(0);
566 }
567 
#undef __FUNCT__
#define __FUNCT__ "VecCreateGhost"
/*@
   VecCreateGhost - Creates a parallel vector with ghost padding on each processor.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhost(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* delegate with array = 0 so the storage is allocated internally */
  ierr = VecCreateGhostWithArray(comm,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
609 
#undef __FUNCT__
#define __FUNCT__ "VecDuplicate_MPI"
/*
   VecDuplicate_MPI - Creates a new parallel vector with the same layout,
   ops table, ghosting, stash settings, and mappings as win; values are NOT
   copied.  The ghost scatter context (localupdate) is shared by reference
   with the parent rather than rebuilt.
*/
PetscErrorCode VecDuplicate_MPI(Vec win,Vec *v)
{
  PetscErrorCode ierr;
  Vec_MPI        *vw,*w = (Vec_MPI *)win->data;
  PetscScalar    *array;

  PetscFunctionBegin;
  ierr = VecCreate(win->comm,v);CHKERRQ(ierr);
  ierr = VecSetSizes(*v,win->map.n,win->map.N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*v,w->nghost,0);CHKERRQ(ierr);
  vw   = (Vec_MPI *)(*v)->data;
  /* copy the parent's ops table (it may have been customized, e.g. by FETI) */
  ierr = PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps));CHKERRQ(ierr);

  /* save local representation of the parallel vector (and scatter) if it exists */
  if (w->localrep) {
    ierr = VecGetArray(*v,&array);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,win->map.n+w->nghost,array,&vw->localrep);CHKERRQ(ierr);
    ierr = PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps));CHKERRQ(ierr);
    ierr = VecRestoreArray(*v,&array);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(*v,vw->localrep);CHKERRQ(ierr);
    /* share the parent's scatter context by reference instead of rebuilding it */
    vw->localupdate = w->localupdate;
    if (vw->localupdate) {
      ierr = PetscObjectReference((PetscObject)vw->localupdate);CHKERRQ(ierr);
    }
  }

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash = win->stash.donotstash;
  (*v)->stash.ignorenegidx = win->stash.ignorenegidx;

  ierr = PetscOListDuplicate(win->olist,&(*v)->olist);CHKERRQ(ierr);
  ierr = PetscFListDuplicate(win->qlist,&(*v)->qlist);CHKERRQ(ierr);
  /* share (by reference) any local-to-global mappings of the parent */
  if (win->mapping) {
    ierr = PetscObjectReference((PetscObject)win->mapping);CHKERRQ(ierr);
    (*v)->mapping = win->mapping;
  }
  if (win->bmapping) {
    ierr = PetscObjectReference((PetscObject)win->bmapping);CHKERRQ(ierr);
    (*v)->bmapping = win->bmapping;
  }
  (*v)->map.bs    = win->map.bs;
  (*v)->bstash.bs = win->bstash.bs;

  PetscFunctionReturn(0);
}
657 
/* ------------------------------------------------------------------------------------------*/
#undef __FUNCT__
#define __FUNCT__ "VecCreateGhostBlockWithArray"
/*@C
   VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space. Indices in the ghost region are based on blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - block size
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
.  nghost - number of local ghost blocks
.  ghosts - global indices of ghost blocks (or PETSC_NULL if not needed)
-  array - the space to store the vector values (as long as n + nghost*bs)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (total local size not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

   Level: advanced

   Concepts: vectors^creating ghosted
   Concepts: vectors^creating with array

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostWithArray(), VecCreateGhostBlocked()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostBlockWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode ierr;
  Vec_MPI        *w;
  PetscScalar    *larray;
  IS             from,to;

  PetscFunctionBegin;
  *vv = 0;

  /* NOTE(review): n is not checked for divisibility by bs here; presumably
     VecSetBlockSize() below enforces that — confirm */
  if (n == PETSC_DECIDE)      SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation; ghost region holds nghost blocks = nghost*bs entries */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,nghost*bs,array);CHKERRQ(ierr);
  ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
  w    = (Vec_MPI *)(*vv)->data;
  /* Create local representation sharing the global vector's storage */
  ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,n+bs*nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = VecSetBlockSize(w->localrep,bs);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localrep);CHKERRQ(ierr);
  ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values;
       'from' is a blocked IS over the ghost blocks, 'to' is the contiguous
       tail [n, n+bs*nghost) of the local representation
  */
  ierr = ISCreateBlock(comm,bs,nghost,ghosts,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localupdate);CHKERRQ(ierr);
  ierr = ISDestroy(to);CHKERRQ(ierr);
  ierr = ISDestroy(from);CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
736 
#undef __FUNCT__
#define __FUNCT__ "VecCreateGhostBlock"
/*@
   VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
        The indicing of the ghost points is done with blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - the block size
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
.  nghost - number of local ghost blocks
-  ghosts - global indices of ghost blocks

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (total local size not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostBlock(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* delegate with array = 0 so the storage is allocated internally */
  ierr = VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
781 
/*
    These introduce a ghosted vector where the ghosting is determined by the call to
  VecSetLocalToGlobalMapping()
*/

#undef __FUNCT__
#define __FUNCT__ "VecSetLocalToGlobalMapping_FETI"
/*
   VecSetLocalToGlobalMapping_FETI - Resizes the FETI vector's storage to the
   mapping's local size (map->n) and builds a sequential local representation
   over it.  The number of ghost entries is map->n minus the owned size.

   NOTE(review): the old array is freed and a fresh one allocated, so any
   values previously stored in the vector are discarded — presumably this is
   only called right after creation; confirm with callers.
*/
PetscErrorCode VecSetLocalToGlobalMapping_FETI(Vec vv,ISLocalToGlobalMapping map)
{
  PetscErrorCode ierr;
  Vec_MPI        *v = (Vec_MPI *)vv->data;

  PetscFunctionBegin;
  v->nghost = map->n - vv->map.n;

  /* we need to make longer the array space that was allocated when the vector was created */
  ierr     = PetscFree(v->array_allocated);CHKERRQ(ierr);
  ierr     = PetscMalloc(map->n*sizeof(PetscScalar),&v->array_allocated);CHKERRQ(ierr);
  v->array = v->array_allocated;

  /* Create local representation over the new (owned + ghost) storage */
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,map->n,v->array,&v->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(vv,v->localrep);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
807 
808 
#undef __FUNCT__
#define __FUNCT__ "VecSetValuesLocal_FETI"
/*
   VecSetValuesLocal_FETI - Inserts values using local (ghosted) indices by
   forwarding directly to the sequential local representation; no stashing
   or interprocess communication is involved.
*/
PetscErrorCode VecSetValuesLocal_FETI(Vec vv,PetscInt n,const PetscInt *ix,const PetscScalar *values,InsertMode mode)
{
  PetscErrorCode ierr;
  Vec_MPI        *v = (Vec_MPI *)vv->data;

  PetscFunctionBegin;
  ierr = VecSetValues(v->localrep,n,ix,values,mode);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
820 
821 EXTERN_C_BEGIN
822 #undef __FUNCT__
823 #define __FUNCT__ "VecCreate_FETI"
824 PetscErrorCode PETSCVEC_DLLEXPORT VecCreate_FETI(Vec vv)
825 {
826   PetscErrorCode ierr;
827 
828   PetscFunctionBegin;
829   ierr = VecSetType(vv,VECMPI);CHKERRQ(ierr);
830 
831   /* overwrite the functions to handle setting values locally */
832   vv->ops->setlocaltoglobalmapping = VecSetLocalToGlobalMapping_FETI;
833   vv->ops->setvalueslocal          = VecSetValuesLocal_FETI;
834   vv->ops->assemblybegin           = 0;
835   vv->ops->assemblyend             = 0;
836   vv->ops->setvaluesblocked        = 0;
837   vv->ops->setvaluesblocked        = 0;
838 
839   PetscFunctionReturn(0);
840 }
841 EXTERN_C_END
842 
843 
844 
845 
846 
847 
848 
849 
850 
851