xref: /petsc/src/vec/vec/impls/mpi/pbvec.c (revision c87e5d42f888a3a565cbd99898d986425684792c)
1 #define PETSCVEC_DLL
2 /*
3    This file contains routines for Parallel vector operations.
4  */
5 #include "src/vec/vec/impls/mpi/pvecimpl.h"   /*I  "petscvec.h"   I*/
6 
#if 0
/*
   VecPublish_MPI - Placeholder publishing hook; compiled out.  Retained
   only as a reference for the (currently empty) publish interface.
*/
#undef __FUNCT__
#define __FUNCT__ "VecPublish_MPI"
static PetscErrorCode VecPublish_MPI(PetscObject obj)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
#endif
16 
17 #undef __FUNCT__
18 #define __FUNCT__ "VecDot_MPI"
19 PetscErrorCode VecDot_MPI(Vec xin,Vec yin,PetscScalar *z)
20 {
21   PetscScalar    sum,work;
22   PetscErrorCode ierr;
23 
24   PetscFunctionBegin;
25   ierr = VecDot_Seq(xin,yin,&work);CHKERRQ(ierr);
26   ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,PetscSum_Op,((PetscObject)xin)->comm);CHKERRQ(ierr);
27   *z = sum;
28   PetscFunctionReturn(0);
29 }
30 
31 #undef __FUNCT__
32 #define __FUNCT__ "VecTDot_MPI"
33 PetscErrorCode VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z)
34 {
35   PetscScalar    sum,work;
36   PetscErrorCode ierr;
37 
38   PetscFunctionBegin;
39   ierr = VecTDot_Seq(xin,yin,&work);CHKERRQ(ierr);
40   ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,PetscSum_Op,((PetscObject)xin)->comm);CHKERRQ(ierr);
41   *z   = sum;
42   PetscFunctionReturn(0);
43 }
44 
45 #undef __FUNCT__
46 #define __FUNCT__ "VecSetOption_MPI"
47 PetscErrorCode VecSetOption_MPI(Vec v,VecOption op,PetscTruth flag)
48 {
49   PetscFunctionBegin;
50   if (op == VEC_IGNORE_OFF_PROC_ENTRIES) {
51     v->stash.donotstash = flag;
52   } else if (op == VEC_IGNORE_NEGATIVE_INDICES) {
53     v->stash.ignorenegidx = flag;
54   }
55   PetscFunctionReturn(0);
56 }
57 
58 EXTERN PetscErrorCode VecDuplicate_MPI(Vec,Vec *);
59 EXTERN_C_BEGIN
60 EXTERN PetscErrorCode VecView_MPI_Draw(Vec,PetscViewer);
61 EXTERN_C_END
62 
63 #undef __FUNCT__
64 #define __FUNCT__ "VecPlaceArray_MPI"
65 PetscErrorCode VecPlaceArray_MPI(Vec vin,const PetscScalar *a)
66 {
67   PetscErrorCode ierr;
68   Vec_MPI        *v = (Vec_MPI *)vin->data;
69 
70   PetscFunctionBegin;
71   if (v->unplacedarray) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
72   v->unplacedarray = v->array;  /* save previous array so reset can bring it back */
73   v->array = (PetscScalar *)a;
74   if (v->localrep) {
75     ierr = VecPlaceArray(v->localrep,a);CHKERRQ(ierr);
76   }
77   PetscFunctionReturn(0);
78 }
79 
80 #undef __FUNCT__
81 #define __FUNCT__ "VecResetArray_MPI"
82 PetscErrorCode VecResetArray_MPI(Vec vin)
83 {
84   Vec_MPI        *v = (Vec_MPI *)vin->data;
85   PetscErrorCode ierr;
86 
87   PetscFunctionBegin;
88   v->array         = v->unplacedarray;
89   v->unplacedarray = 0;
90   if (v->localrep) {
91     ierr = VecResetArray(v->localrep);CHKERRQ(ierr);
92   }
93   PetscFunctionReturn(0);
94 }
95 
96 EXTERN PetscErrorCode VecLoad_Binary(PetscViewer, const VecType, Vec*);
97 EXTERN PetscErrorCode VecGetValues_MPI(Vec,PetscInt,const PetscInt [],PetscScalar []);
98 
/*
   DvOps - Method table installed on every MPI vector by
   VecCreate_MPI_Private().  The entries are POSITIONAL: their order must
   match the field order of struct _VecOps exactly, so do not reorder or
   insert entries here without updating that declaration.  Operations that
   touch only local data reuse the sequential (Seq) implementations;
   reductions, assembly, and size queries use the _MPI variants.
   Zero entries are operations this type does not implement.
*/
static struct _VecOps DvOps = { VecDuplicate_MPI, /* 1 */
            VecDuplicateVecs_Default,
            VecDestroyVecs_Default,
            VecDot_MPI,
            VecMDot_MPI,
            VecNorm_MPI,
            VecTDot_MPI,
            VecMTDot_MPI,
            VecScale_Seq,
            VecCopy_Seq, /* 10 */
            VecSet_Seq,
            VecSwap_Seq,
            VecAXPY_Seq,
            VecAXPBY_Seq,
            VecMAXPY_Seq,
            VecAYPX_Seq,
            VecWAXPY_Seq,
            VecAXPBYPCZ_Seq,
            VecPointwiseMult_Seq,
            VecPointwiseDivide_Seq,
            VecSetValues_MPI, /* 20 */
            VecAssemblyBegin_MPI,
            VecAssemblyEnd_MPI,
            VecGetArray_Seq,
            VecGetSize_MPI,
            VecGetSize_Seq,
            VecRestoreArray_Seq,
            VecMax_MPI,
            VecMin_MPI,
            VecSetRandom_Seq,
            VecSetOption_MPI,
            VecSetValuesBlocked_MPI,
            VecDestroy_MPI,
            VecView_MPI,
            VecPlaceArray_MPI,
            VecReplaceArray_Seq,
            VecDot_Seq,
            VecTDot_Seq,
            VecNorm_Seq,
            VecMDot_Seq,
            VecMTDot_Seq,
            VecLoadIntoVector_Default,
            VecReciprocal_Default,
            0, /* VecViewNative... */
            VecConjugate_Seq,
            0,
            0,
            VecResetArray_MPI,
            0,
            VecMaxPointwiseDivide_Seq,
            VecLoad_Binary,
            VecPointwiseMax_Seq,
            VecPointwiseMaxAbs_Seq,
            VecPointwiseMin_Seq,
            VecGetValues_MPI};
154 
#undef __FUNCT__
#define __FUNCT__ "VecCreate_MPI_Private"
/*
    VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
    VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
    VecDuplicate_MPI(), VecCreateGhostWithArray(), VecDuplicate_MPI(), and VecDuplicate_Shared()

    If alloc is true and array is PETSC_NULL then this routine allocates the space, otherwise
    no space is allocated.
*/
PetscErrorCode VecCreate_MPI_Private(Vec v,PetscTruth alloc,PetscInt nghost,const PetscScalar array[])
{
  Vec_MPI        *s;
  PetscErrorCode ierr;

  PetscFunctionBegin;

  /* allocate and attach the implementation-specific data, then install the
     MPI method table (DvOps) on this vector */
  ierr           = PetscNewLog(v,Vec_MPI,&s);CHKERRQ(ierr);
  v->data        = (void*)s;
  ierr           = PetscMemcpy(v->ops,&DvOps,sizeof(DvOps));CHKERRQ(ierr);
  s->nghost      = nghost;
  v->mapping     = 0;
  v->bmapping    = 0;
  v->petscnative = PETSC_TRUE;

  /* default block size is 1 if the caller never set one */
  if (v->map->bs == -1) v->map->bs = 1;
  ierr = PetscMapSetUp(v->map);CHKERRQ(ierr);
  s->array           = (PetscScalar *)array;
  s->array_allocated = 0;
  if (alloc && !array) {
    /* storage covers the owned entries plus nghost ghost slots; only the
       owned portion is zeroed here */
    PetscInt n         = v->map->n+nghost;
    ierr               = PetscMalloc(n*sizeof(PetscScalar),&s->array);CHKERRQ(ierr);
    ierr               = PetscLogObjectMemory(v,n*sizeof(PetscScalar));CHKERRQ(ierr);
    ierr               = PetscMemzero(s->array,v->map->n*sizeof(PetscScalar));CHKERRQ(ierr);
    s->array_allocated = s->array;  /* remember we own this memory so destroy frees it */
  }

  /* By default parallel vectors do not have local representation */
  s->localrep    = 0;
  s->localupdate = 0;

  v->stash.insertmode  = NOT_SET_VALUES;
  /* create the stashes. The block-size for bstash is set later when
     VecSetValuesBlocked is called.
  */
  ierr = VecStashCreate_Private(((PetscObject)v)->comm,1,&v->stash);CHKERRQ(ierr);
  ierr = VecStashCreate_Private(((PetscObject)v)->comm,v->map->bs,&v->bstash);CHKERRQ(ierr);

#if defined(PETSC_HAVE_MATLAB_ENGINE)
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEnginePut_C","VecMatlabEnginePut_Default",VecMatlabEnginePut_Default);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEngineGet_C","VecMatlabEngineGet_Default",VecMatlabEngineGet_Default);CHKERRQ(ierr);
#endif
  ierr = PetscObjectChangeTypeName((PetscObject)v,VECMPI);CHKERRQ(ierr);
  ierr = PetscPublishAll(v);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
211 
212 /*MC
213    VECMPI - VECMPI = "mpi" - The basic parallel vector
214 
215    Options Database Keys:
216 . -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions()
217 
218   Level: beginner
219 
.seealso: VecCreate(), VecSetType(), VecSetFromOptions(), VecCreateMPIWithArray(), VECMPI, VecType, VecCreateMPI()
221 M*/
222 
223 EXTERN_C_BEGIN
224 #undef __FUNCT__
225 #define __FUNCT__ "VecCreate_MPI"
226 PetscErrorCode PETSCVEC_DLLEXPORT VecCreate_MPI(Vec vv)
227 {
228   PetscErrorCode ierr;
229 
230   PetscFunctionBegin;
231   ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,0,0);CHKERRQ(ierr);
232   PetscFunctionReturn(0);
233 }
234 EXTERN_C_END
235 
236 #undef __FUNCT__
237 #define __FUNCT__ "VecCreateMPIWithArray"
238 /*@C
239    VecCreateMPIWithArray - Creates a parallel, array-style vector,
240    where the user provides the array space to store the vector values.
241 
242    Collective on MPI_Comm
243 
244    Input Parameters:
245 +  comm  - the MPI communicator to use
246 .  n     - local vector length, cannot be PETSC_DECIDE
247 .  N     - global vector length (or PETSC_DECIDE to have calculated)
248 -  array - the user provided array to store the vector values
249 
250    Output Parameter:
251 .  vv - the vector
252 
253    Notes:
254    Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
255    same type as an existing vector.
256 
257    If the user-provided array is PETSC_NULL, then VecPlaceArray() can be used
258    at a later stage to SET the array for storing the vector values.
259 
260    PETSc does NOT free the array when the vector is destroyed via VecDestroy().
261    The user should not free the array until the vector is destroyed.
262 
263    Level: intermediate
264 
265    Concepts: vectors^creating with array
266 
267 .seealso: VecCreateSeqWithArray(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(),
268           VecCreateMPI(), VecCreateGhostWithArray(), VecPlaceArray()
269 
270 @*/
271 PetscErrorCode PETSCVEC_DLLEXPORT VecCreateMPIWithArray(MPI_Comm comm,PetscInt n,PetscInt N,const PetscScalar array[],Vec *vv)
272 {
273   PetscErrorCode ierr;
274 
275   PetscFunctionBegin;
276   if (n == PETSC_DECIDE) {
277     SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
278   }
279   ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
280   ierr = VecCreate(comm,vv);CHKERRQ(ierr);
281   ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
282   ierr = VecCreate_MPI_Private(*vv,PETSC_FALSE,0,array);CHKERRQ(ierr);
283   PetscFunctionReturn(0);
284 }
285 
286 #undef __FUNCT__
287 #define __FUNCT__ "VecGhostGetLocalForm"
288 /*@
289     VecGhostGetLocalForm - Obtains the local ghosted representation of
290     a parallel vector created with VecCreateGhost().
291 
292     Not Collective
293 
294     Input Parameter:
295 .   g - the global vector. Vector must be have been obtained with either
296         VecCreateGhost(), VecCreateGhostWithArray() or VecCreateSeq().
297 
298     Output Parameter:
299 .   l - the local (ghosted) representation
300 
301     Notes:
302     This routine does not actually update the ghost values, but rather it
303     returns a sequential vector that includes the locations for the ghost
304     values and their current values. The returned vector and the original
305     vector passed in share the same array that contains the actual vector data.
306 
307     One should call VecGhostRestoreLocalForm() or VecDestroy() once one is
308     finished using the object.
309 
310     Level: advanced
311 
312    Concepts: vectors^ghost point access
313 
314 .seealso: VecCreateGhost(), VecGhostRestoreLocalForm(), VecCreateGhostWithArray()
315 
316 @*/
317 PetscErrorCode PETSCVEC_DLLEXPORT VecGhostGetLocalForm(Vec g,Vec *l)
318 {
319   PetscErrorCode ierr;
320   PetscTruth     isseq,ismpi;
321 
322   PetscFunctionBegin;
323   PetscValidHeaderSpecific(g,VEC_COOKIE,1);
324   PetscValidPointer(l,2);
325 
326   ierr = PetscTypeCompare((PetscObject)g,VECSEQ,&isseq);CHKERRQ(ierr);
327   ierr = PetscTypeCompare((PetscObject)g,VECMPI,&ismpi);CHKERRQ(ierr);
328   if (ismpi) {
329     Vec_MPI *v  = (Vec_MPI*)g->data;
330     if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
331     *l = v->localrep;
332   } else if (isseq) {
333     *l = g;
334   } else {
335     SETERRQ1(PETSC_ERR_ARG_WRONG,"Vector type %s does not have local representation",((PetscObject)g)->type_name);
336   }
337   ierr = PetscObjectReference((PetscObject)*l);CHKERRQ(ierr);
338   PetscFunctionReturn(0);
339 }
340 
341 #undef __FUNCT__
342 #define __FUNCT__ "VecGhostRestoreLocalForm"
343 /*@
344     VecGhostRestoreLocalForm - Restores the local ghosted representation of
345     a parallel vector obtained with VecGhostGetLocalForm().
346 
347     Not Collective
348 
349     Input Parameter:
350 +   g - the global vector
351 -   l - the local (ghosted) representation
352 
353     Notes:
354     This routine does not actually update the ghost values, but rather it
355     returns a sequential vector that includes the locations for the ghost values
356     and their current values.
357 
358     Level: advanced
359 
360 .seealso: VecCreateGhost(), VecGhostGetLocalForm(), VecCreateGhostWithArray()
361 @*/
362 PetscErrorCode PETSCVEC_DLLEXPORT VecGhostRestoreLocalForm(Vec g,Vec *l)
363 {
364   PetscFunctionBegin;
365   PetscObjectDereference((PetscObject)*l);
366   PetscFunctionReturn(0);
367 }
368 
369 #undef __FUNCT__
370 #define __FUNCT__ "VecGhostUpdateBegin"
371 /*@
372    VecGhostUpdateBegin - Begins the vector scatter to update the vector from
373    local representation to global or global representation to local.
374 
375    Collective on Vec
376 
377    Input Parameters:
378 +  g - the vector (obtained with VecCreateGhost() or VecDuplicate())
379 .  insertmode - one of ADD_VALUES or INSERT_VALUES
380 -  scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE
381 
382    Notes:
383    Use the following to update the ghost regions with correct values from the owning process
384 .vb
385        VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
386        VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
387 .ve
388 
389    Use the following to accumulate the ghost region values onto the owning processors
390 .vb
391        VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
392        VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
393 .ve
394 
395    To accumulate the ghost region values onto the owning processors and then update
396    the ghost regions correctly, call the later followed by the former, i.e.,
397 .vb
398        VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
399        VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
400        VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
401        VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
402 .ve
403 
404    Level: advanced
405 
406 .seealso: VecCreateGhost(), VecGhostUpdateEnd(), VecGhostGetLocalForm(),
407           VecGhostRestoreLocalForm(),VecCreateGhostWithArray()
408 
409 @*/
410 PetscErrorCode PETSCVEC_DLLEXPORT VecGhostUpdateBegin(Vec g,InsertMode insertmode,ScatterMode scattermode)
411 {
412   Vec_MPI        *v;
413   PetscErrorCode ierr;
414 
415   PetscFunctionBegin;
416   PetscValidHeaderSpecific(g,VEC_COOKIE,1);
417 
418   v  = (Vec_MPI*)g->data;
419   if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
420   if (!v->localupdate) PetscFunctionReturn(0);
421 
422   if (scattermode == SCATTER_REVERSE) {
423     ierr = VecScatterBegin(v->localupdate,v->localrep,g,insertmode,scattermode);CHKERRQ(ierr);
424   } else {
425     ierr = VecScatterBegin(v->localupdate,g,v->localrep,insertmode,scattermode);CHKERRQ(ierr);
426   }
427   PetscFunctionReturn(0);
428 }
429 
430 #undef __FUNCT__
431 #define __FUNCT__ "VecGhostUpdateEnd"
432 /*@
433    VecGhostUpdateEnd - End the vector scatter to update the vector from
434    local representation to global or global representation to local.
435 
436    Collective on Vec
437 
438    Input Parameters:
439 +  g - the vector (obtained with VecCreateGhost() or VecDuplicate())
440 .  insertmode - one of ADD_VALUES or INSERT_VALUES
441 -  scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE
442 
443    Notes:
444 
445    Use the following to update the ghost regions with correct values from the owning process
446 .vb
447        VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
448        VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
449 .ve
450 
451    Use the following to accumulate the ghost region values onto the owning processors
452 .vb
453        VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
454        VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
455 .ve
456 
457    To accumulate the ghost region values onto the owning processors and then update
458    the ghost regions correctly, call the later followed by the former, i.e.,
459 .vb
460        VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
461        VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
462        VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
463        VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
464 .ve
465 
466    Level: advanced
467 
468 .seealso: VecCreateGhost(), VecGhostUpdateBegin(), VecGhostGetLocalForm(),
469           VecGhostRestoreLocalForm(),VecCreateGhostWithArray()
470 
471 @*/
472 PetscErrorCode PETSCVEC_DLLEXPORT VecGhostUpdateEnd(Vec g,InsertMode insertmode,ScatterMode scattermode)
473 {
474   Vec_MPI        *v;
475   PetscErrorCode ierr;
476 
477   PetscFunctionBegin;
478   PetscValidHeaderSpecific(g,VEC_COOKIE,1);
479 
480   v  = (Vec_MPI*)g->data;
481   if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
482   if (!v->localupdate) PetscFunctionReturn(0);
483 
484   if (scattermode == SCATTER_REVERSE) {
485     ierr = VecScatterEnd(v->localupdate,v->localrep,g,insertmode,scattermode);CHKERRQ(ierr);
486   } else {
487     ierr = VecScatterEnd(v->localupdate,g,v->localrep,insertmode,scattermode);CHKERRQ(ierr);
488   }
489   PetscFunctionReturn(0);
490 }
491 
492 #undef __FUNCT__
493 #define __FUNCT__ "VecCreateGhostWithArray"
494 /*@C
495    VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
496    the caller allocates the array space.
497 
498    Collective on MPI_Comm
499 
500    Input Parameters:
501 +  comm - the MPI communicator to use
502 .  n - local vector length
503 .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
504 .  nghost - number of local ghost points
505 .  ghosts - global indices of ghost points (or PETSC_NULL if not needed)
506 -  array - the space to store the vector values (as long as n + nghost)
507 
508    Output Parameter:
509 .  vv - the global vector representation (without ghost points as part of vector)
510 
511    Notes:
512    Use VecGhostGetLocalForm() to access the local, ghosted representation
513    of the vector.
514 
515    This also automatically sets the ISLocalToGlobalMapping() for this vector.
516 
517    Level: advanced
518 
519    Concepts: vectors^creating with array
520    Concepts: vectors^ghosted
521 
522 .seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
523           VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
524           VecCreateGhostBlock(), VecCreateGhostBlockWithArray()
525 
526 @*/
527 PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostWithArray(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
528 {
529   PetscErrorCode         ierr;
530   Vec_MPI                *w;
531   PetscScalar            *larray;
532   IS                     from,to;
533   ISLocalToGlobalMapping ltog;
534   PetscInt               rstart,i,*indices;
535 
536   PetscFunctionBegin;
537   *vv = 0;
538 
539   if (n == PETSC_DECIDE)      SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
540   if (nghost == PETSC_DECIDE) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
541   if (nghost < 0)             SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
542   ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
543   /* Create global representation */
544   ierr = VecCreate(comm,vv);CHKERRQ(ierr);
545   ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
546   ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost,array);CHKERRQ(ierr);
547   w    = (Vec_MPI *)(*vv)->data;
548   /* Create local representation */
549   ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
550   ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
551   ierr = PetscLogObjectParent(*vv,w->localrep);CHKERRQ(ierr);
552   ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);
553 
554   /*
555        Create scatter context for scattering (updating) ghost values
556   */
557   ierr = ISCreateGeneral(comm,nghost,ghosts,&from);CHKERRQ(ierr);
558   ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
559   ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
560   ierr = PetscLogObjectParent(*vv,w->localupdate);CHKERRQ(ierr);
561   ierr = ISDestroy(to);CHKERRQ(ierr);
562   ierr = ISDestroy(from);CHKERRQ(ierr);
563 
564   /* set local to global mapping for ghosted vector */
565   ierr = PetscMalloc((n+nghost)*sizeof(PetscInt),&indices);CHKERRQ(ierr);
566   ierr = VecGetOwnershipRange(*vv,&rstart,PETSC_NULL);CHKERRQ(ierr);
567   for (i=0; i<n; i++) {
568     indices[i] = rstart + i;
569   }
570   for (i=0; i<nghost; i++) {
571     indices[n+i] = ghosts[i];
572   }
573   ierr = ISLocalToGlobalMappingCreate(comm,n+nghost,indices,&ltog);CHKERRQ(ierr);
574   ierr = PetscFree(indices);CHKERRQ(ierr);
575   ierr = VecSetLocalToGlobalMapping(*vv,ltog);CHKERRQ(ierr);
576   ierr = ISLocalToGlobalMappingDestroy(ltog);CHKERRQ(ierr);
577   ierr = PetscFree(indices);CHKERRQ(ierr);
578   PetscFunctionReturn(0);
579 }
580 
581 #undef __FUNCT__
582 #define __FUNCT__ "VecCreateGhost"
583 /*@
584    VecCreateGhost - Creates a parallel vector with ghost padding on each processor.
585 
586    Collective on MPI_Comm
587 
588    Input Parameters:
589 +  comm - the MPI communicator to use
590 .  n - local vector length
591 .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
592 .  nghost - number of local ghost points
593 -  ghosts - global indices of ghost points
594 
595    Output Parameter:
596 .  vv - the global vector representation (without ghost points as part of vector)
597 
598    Notes:
599    Use VecGhostGetLocalForm() to access the local, ghosted representation
600    of the vector.
601 
602    This also automatically sets the ISLocalToGlobalMapping() for this vector.
603 
604    Level: advanced
605 
606    Concepts: vectors^ghosted
607 
608 .seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
609           VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
610           VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
611           VecCreateGhostBlock(), VecCreateGhostBlockWithArray()
612 
613 @*/
614 PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhost(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
615 {
616   PetscErrorCode ierr;
617 
618   PetscFunctionBegin;
619   ierr = VecCreateGhostWithArray(comm,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
620   PetscFunctionReturn(0);
621 }
622 
#undef __FUNCT__
#define __FUNCT__ "VecDuplicate_MPI"
/*
   VecDuplicate_MPI - Creates a new vector with the same layout, type and
   ghosting as win, sharing win's PetscMap (reference counted) but with
   freshly allocated storage.  Values are NOT copied.
*/
PetscErrorCode VecDuplicate_MPI(Vec win,Vec *v)
{
  PetscErrorCode ierr;
  Vec_MPI        *vw,*w = (Vec_MPI *)win->data;
  PetscScalar    *array;

  PetscFunctionBegin;
  ierr = VecCreate(((PetscObject)win)->comm,v);CHKERRQ(ierr);

  /* use the map that exists already in win */
  ierr = PetscMapDestroy((*v)->map);CHKERRQ(ierr);
  (*v)->map = win->map;
  win->map->refcnt++;

  ierr = VecCreate_MPI_Private(*v,PETSC_TRUE,w->nghost,0);CHKERRQ(ierr);
  vw   = (Vec_MPI *)(*v)->data;
  /* copy win's (possibly customized) function table over the default one */
  ierr = PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps));CHKERRQ(ierr);

  /* save local representation of the parallel vector (and scatter) if it exists */
  if (w->localrep) {
    ierr = VecGetArray(*v,&array);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,win->map->n+w->nghost,array,&vw->localrep);CHKERRQ(ierr);
    ierr = PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps));CHKERRQ(ierr);
    ierr = VecRestoreArray(*v,&array);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(*v,vw->localrep);CHKERRQ(ierr);
    /* the scatter context is shared, not copied */
    vw->localupdate = w->localupdate;
    if (vw->localupdate) {
      ierr = PetscObjectReference((PetscObject)vw->localupdate);CHKERRQ(ierr);
    }
  }

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash = win->stash.donotstash;
  (*v)->stash.ignorenegidx = win->stash.ignorenegidx;

  /* inherit composed objects, composed functions and mappings by reference */
  ierr = PetscOListDuplicate(((PetscObject)win)->olist,&((PetscObject)(*v))->olist);CHKERRQ(ierr);
  ierr = PetscFListDuplicate(((PetscObject)win)->qlist,&((PetscObject)(*v))->qlist);CHKERRQ(ierr);
  if (win->mapping) {
    ierr = PetscObjectReference((PetscObject)win->mapping);CHKERRQ(ierr);
    (*v)->mapping = win->mapping;
  }
  if (win->bmapping) {
    ierr = PetscObjectReference((PetscObject)win->bmapping);CHKERRQ(ierr);
    (*v)->bmapping = win->bmapping;
  }
  (*v)->map->bs    = win->map->bs;
  (*v)->bstash.bs = win->bstash.bs;

  PetscFunctionReturn(0);
}
675 
676 /* ------------------------------------------------------------------------------------------*/
#undef __FUNCT__
#define __FUNCT__ "VecCreateGhostBlockWithArray"
/*@C
   VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space. Indices in the ghost region are based on blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - block size
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
.  nghost - number of local ghost blocks
.  ghosts - global indices of ghost blocks (or PETSC_NULL if not needed)
-  array - the space to store the vector values (as long as n + nghost*bs)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (total local size not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

   Unlike VecCreateGhostWithArray(), this routine does NOT set an
   ISLocalToGlobalMapping on the vector.

   Level: advanced

   Concepts: vectors^creating ghosted
   Concepts: vectors^creating with array

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostWithArray(), VecCreateGhostBlocked()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostBlockWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode ierr;
  Vec_MPI        *w;
  PetscScalar    *larray;
  IS             from,to;

  PetscFunctionBegin;
  *vv = 0;

  /* NOTE(review): n is not checked to be divisible by bs here; presumably
     VecSetBlockSize() below errors in that case -- confirm */
  if (n == PETSC_DECIDE)      SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation; alloc is PETSC_FALSE since the caller
     supplies the storage (which must cover n + nghost*bs entries) */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_FALSE,nghost*bs,array);CHKERRQ(ierr);
  ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
  w    = (Vec_MPI *)(*vv)->data;
  /* Create local representation sharing the global vector's storage */
  ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,n+bs*nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = VecSetBlockSize(w->localrep,bs);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localrep);CHKERRQ(ierr);
  ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  ierr = ISCreateBlock(comm,bs,nghost,ghosts,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localupdate);CHKERRQ(ierr);
  ierr = ISDestroy(to);CHKERRQ(ierr);
  ierr = ISDestroy(from);CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
754 
755 #undef __FUNCT__
756 #define __FUNCT__ "VecCreateGhostBlock"
757 /*@
758    VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
759         The indicing of the ghost points is done with blocks.
760 
761    Collective on MPI_Comm
762 
763    Input Parameters:
764 +  comm - the MPI communicator to use
765 .  bs - the block size
766 .  n - local vector length
767 .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
768 .  nghost - number of local ghost blocks
769 -  ghosts - global indices of ghost blocks
770 
771    Output Parameter:
772 .  vv - the global vector representation (without ghost points as part of vector)
773 
774    Notes:
775    Use VecGhostGetLocalForm() to access the local, ghosted representation
776    of the vector.
777 
778    n is the local vector size (total local size not the number of blocks) while nghost
779    is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
780    portion is bs*nghost
781 
782    Level: advanced
783 
784    Concepts: vectors^ghosted
785 
786 .seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
787           VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
788           VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecCreateGhostBlockWithArray()
789 
790 @*/
791 PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostBlock(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
792 {
793   PetscErrorCode ierr;
794 
795   PetscFunctionBegin;
796   ierr = VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
797   PetscFunctionReturn(0);
798 }
799 
800 /*
801     These introduce a ghosted vector where the ghosting is determined by the call to
802   VecSetLocalToGlobalMapping()
803 */
804 
#undef __FUNCT__
#define __FUNCT__ "VecSetLocalToGlobalMapping_FETI"
/*
   VecSetLocalToGlobalMapping_FETI - Installs a local-to-global mapping on a
   FETI vector and (re)builds a ghosted local representation sized by the
   mapping: the ghost count is the mapping length minus the owned length.

   NOTE(review): the previously allocated array is freed and replaced by an
   uninitialized allocation of map->n entries -- any values already stored in
   the vector are lost; presumably this is only called before the vector is
   used.  Also assumes map->n >= vv->map->n and that no localrep was created
   earlier (the old one would leak) -- confirm against callers.
*/
PetscErrorCode VecSetLocalToGlobalMapping_FETI(Vec vv,ISLocalToGlobalMapping map)
{
  PetscErrorCode ierr;
  Vec_MPI        *v = (Vec_MPI *)vv->data;

  PetscFunctionBegin;
  v->nghost = map->n - vv->map->n;

  /* we need to make longer the array space that was allocated when the vector was created */
  ierr     = PetscFree(v->array_allocated);CHKERRQ(ierr);
  ierr     = PetscMalloc(map->n*sizeof(PetscScalar),&v->array_allocated);CHKERRQ(ierr);
  v->array = v->array_allocated;

  /* Create local representation */
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,map->n,v->array,&v->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(vv,v->localrep);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
825 
826 
827 #undef __FUNCT__
828 #define __FUNCT__ "VecSetValuesLocal_FETI"
829 PetscErrorCode VecSetValuesLocal_FETI(Vec vv,PetscInt n,const PetscInt *ix,const PetscScalar *values,InsertMode mode)
830 {
831   PetscErrorCode ierr;
832   Vec_MPI        *v = (Vec_MPI *)vv->data;
833 
834   PetscFunctionBegin;
835   ierr = VecSetValues(v->localrep,n,ix,values,mode);CHKERRQ(ierr);
836   PetscFunctionReturn(0);
837 }
838 
839 EXTERN_C_BEGIN
840 #undef __FUNCT__
841 #define __FUNCT__ "VecCreate_FETI"
842 PetscErrorCode PETSCVEC_DLLEXPORT VecCreate_FETI(Vec vv)
843 {
844   PetscErrorCode ierr;
845 
846   PetscFunctionBegin;
847   ierr = VecSetType(vv,VECMPI);CHKERRQ(ierr);
848 
849   /* overwrite the functions to handle setting values locally */
850   vv->ops->setlocaltoglobalmapping = VecSetLocalToGlobalMapping_FETI;
851   vv->ops->setvalueslocal          = VecSetValuesLocal_FETI;
852   vv->ops->assemblybegin           = 0;
853   vv->ops->assemblyend             = 0;
854   vv->ops->setvaluesblocked        = 0;
855   vv->ops->setvaluesblocked        = 0;
856 
857   PetscFunctionReturn(0);
858 }
859 EXTERN_C_END
860 
861 
862 
863 
864 
865 
866 
867 
868 
869