xref: /petsc/src/vec/vec/impls/mpi/pbvec.c (revision cce0b1b27d2ec7cd90640d8c89da2867b0221c52)
1 #define PETSCVEC_DLL
2 /*
3    This file contains routines for Parallel vector operations.
4  */
5 #include "../src/vec/vec/impls/mpi/pvecimpl.h"   /*I  "petscvec.h"   I*/
6 
#if 0
/* Disabled publishing hook for MPI vectors; kept for reference only
   (compiled out, so it is never registered or called). */
#undef __FUNCT__
#define __FUNCT__ "VecPublish_MPI"
static PetscErrorCode VecPublish_MPI(PetscObject obj)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
#endif
16 
17 #undef __FUNCT__
18 #define __FUNCT__ "VecDot_MPI"
19 PetscErrorCode VecDot_MPI(Vec xin,Vec yin,PetscScalar *z)
20 {
21   PetscScalar    sum,work;
22   PetscErrorCode ierr;
23 
24   PetscFunctionBegin;
25   ierr = VecDot_Seq(xin,yin,&work);CHKERRQ(ierr);
26   ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,PetscSum_Op,((PetscObject)xin)->comm);CHKERRQ(ierr);
27   *z = sum;
28   PetscFunctionReturn(0);
29 }
30 
31 #undef __FUNCT__
32 #define __FUNCT__ "VecTDot_MPI"
33 PetscErrorCode VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z)
34 {
35   PetscScalar    sum,work;
36   PetscErrorCode ierr;
37 
38   PetscFunctionBegin;
39   ierr = VecTDot_Seq(xin,yin,&work);CHKERRQ(ierr);
40   ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,PetscSum_Op,((PetscObject)xin)->comm);CHKERRQ(ierr);
41   *z   = sum;
42   PetscFunctionReturn(0);
43 }
44 
45 #undef __FUNCT__
46 #define __FUNCT__ "VecSetOption_MPI"
47 PetscErrorCode VecSetOption_MPI(Vec v,VecOption op,PetscTruth flag)
48 {
49   PetscFunctionBegin;
50   if (op == VEC_IGNORE_OFF_PROC_ENTRIES) {
51     v->stash.donotstash = flag;
52   } else if (op == VEC_IGNORE_NEGATIVE_INDICES) {
53     v->stash.ignorenegidx = flag;
54   }
55   PetscFunctionReturn(0);
56 }
57 
58 EXTERN PetscErrorCode VecDuplicate_MPI(Vec,Vec *);
59 EXTERN_C_BEGIN
60 EXTERN PetscErrorCode VecView_MPI_Draw(Vec,PetscViewer);
61 EXTERN_C_END
62 
63 #undef __FUNCT__
64 #define __FUNCT__ "VecPlaceArray_MPI"
65 PetscErrorCode VecPlaceArray_MPI(Vec vin,const PetscScalar *a)
66 {
67   PetscErrorCode ierr;
68   Vec_MPI        *v = (Vec_MPI *)vin->data;
69 
70   PetscFunctionBegin;
71   if (v->unplacedarray) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
72   v->unplacedarray = v->array;  /* save previous array so reset can bring it back */
73   v->array = (PetscScalar *)a;
74   if (v->localrep) {
75     ierr = VecPlaceArray(v->localrep,a);CHKERRQ(ierr);
76   }
77   PetscFunctionReturn(0);
78 }
79 
#undef __FUNCT__
#define __FUNCT__ "VecResetArray_MPI"
/*
   VecResetArray_MPI - Restores the data array saved by VecPlaceArray_MPI().

   NOTE(review): there is no check that VecPlaceArray() was actually called
   first; if it was not, this sets the data pointer to 0. Presumably callers
   guarantee correct pairing of place/reset — confirm at the VecResetArray()
   interface level.
*/
PetscErrorCode VecResetArray_MPI(Vec vin)
{
  Vec_MPI        *v = (Vec_MPI *)vin->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  v->array         = v->unplacedarray;  /* bring back the saved array */
  v->unplacedarray = 0;
  if (v->localrep) {
    /* the local (ghosted) representation shares storage; reset it too */
    ierr = VecResetArray(v->localrep);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
95 
96 EXTERN PetscErrorCode VecLoad_Binary(PetscViewer, const VecType, Vec*);
97 EXTERN PetscErrorCode VecGetValues_MPI(Vec,PetscInt,const PetscInt [],PetscScalar []);
98 
/*
   Operations table installed on every VECMPI vector by VecCreate_MPI_Private().
   Entries ending in _MPI involve parallel communication or bookkeeping;
   entries ending in _Seq act only on the locally owned portion of the data.
   The numeric comments record the slot position within struct _VecOps —
   the order of entries here must match that struct exactly.
*/
static struct _VecOps DvOps = { VecDuplicate_MPI, /* 1 */
            VecDuplicateVecs_Default,
            VecDestroyVecs_Default,
            VecDot_MPI,
            VecMDot_MPI,
            VecNorm_MPI,
            VecTDot_MPI,
            VecMTDot_MPI,
            VecScale_Seq,
            VecCopy_Seq, /* 10 */
            VecSet_Seq,
            VecSwap_Seq,
            VecAXPY_Seq,
            VecAXPBY_Seq,
            VecMAXPY_Seq,
            VecAYPX_Seq,
            VecWAXPY_Seq,
            VecAXPBYPCZ_Seq,
            VecPointwiseMult_Seq,
            VecPointwiseDivide_Seq,
            VecSetValues_MPI, /* 20 */
            VecAssemblyBegin_MPI,
            VecAssemblyEnd_MPI,
            VecGetArray_Seq,
            VecGetSize_MPI,
            VecGetSize_Seq,
            VecRestoreArray_Seq,
            VecMax_MPI,
            VecMin_MPI,
            VecSetRandom_Seq,
            VecSetOption_MPI,
            VecSetValuesBlocked_MPI,
            VecDestroy_MPI,
            VecView_MPI,
            VecPlaceArray_MPI,
            VecReplaceArray_Seq,
            VecDot_Seq,
            VecTDot_Seq,
            VecNorm_Seq,
            VecMDot_Seq,
            VecMTDot_Seq,
            VecLoadIntoVector_Default,
            0, /* VecLoadIntoVectorNative */
            VecReciprocal_Default,
            0, /* VecViewNative... */
            VecConjugate_Seq,
            0,
            0,
            VecResetArray_MPI,
            0,
            VecMaxPointwiseDivide_Seq,
            VecLoad_Binary,
            VecPointwiseMax_Seq,
            VecPointwiseMaxAbs_Seq,
            VecPointwiseMin_Seq,
            VecGetValues_MPI};
155 
#undef __FUNCT__
#define __FUNCT__ "VecCreate_MPI_Private"
/*
    VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
    VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
    VecDuplicate_MPI(), VecCreateGhostWithArray(), and VecDuplicate_Shared()

    If alloc is true and array is PETSC_NULL then this routine allocates the space, otherwise
    no space is allocated.

    nghost is the number of extra (ghost) entries appended after the locally
    owned portion of the array.
*/
PetscErrorCode VecCreate_MPI_Private(Vec v,PetscTruth alloc,PetscInt nghost,const PetscScalar array[])
{
  Vec_MPI        *s;
  PetscErrorCode ierr;

  PetscFunctionBegin;

  ierr           = PetscNewLog(v,Vec_MPI,&s);CHKERRQ(ierr);
  v->data        = (void*)s;
  /* install the VECMPI function table */
  ierr           = PetscMemcpy(v->ops,&DvOps,sizeof(DvOps));CHKERRQ(ierr);
  s->nghost      = nghost;
  v->mapping     = 0;
  v->bmapping    = 0;
  v->petscnative = PETSC_TRUE;

  if (v->map->bs == -1) v->map->bs = 1;  /* default block size is 1 */
  ierr = PetscMapSetUp(v->map);CHKERRQ(ierr);
  s->array           = (PetscScalar *)array;
  s->array_allocated = 0;  /* nonzero only when we allocate; tells VecDestroy what to free */
  if (alloc && !array) {
    PetscInt n         = v->map->n+nghost;
    ierr               = PetscMalloc(n*sizeof(PetscScalar),&s->array);CHKERRQ(ierr);
    ierr               = PetscLogObjectMemory(v,n*sizeof(PetscScalar));CHKERRQ(ierr);
    /* NOTE(review): only the owned portion (v->map->n entries) is zeroed;
       the trailing nghost slots stay uninitialized until a ghost update
       fills them — confirm this is intended */
    ierr               = PetscMemzero(s->array,v->map->n*sizeof(PetscScalar));CHKERRQ(ierr);
    s->array_allocated = s->array;
  }

  /* By default parallel vectors do not have local representation */
  s->localrep    = 0;
  s->localupdate = 0;

  v->stash.insertmode  = NOT_SET_VALUES;
  /* create the stashes. The block-size for bstash is set later when
     VecSetValuesBlocked is called.
  */
  ierr = VecStashCreate_Private(((PetscObject)v)->comm,1,&v->stash);CHKERRQ(ierr);
  ierr = VecStashCreate_Private(((PetscObject)v)->comm,v->map->bs,&v->bstash);CHKERRQ(ierr);

#if defined(PETSC_HAVE_MATLAB_ENGINE)
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEnginePut_C","VecMatlabEnginePut_Default",VecMatlabEnginePut_Default);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEngineGet_C","VecMatlabEngineGet_Default",VecMatlabEngineGet_Default);CHKERRQ(ierr);
#endif
  ierr = PetscObjectChangeTypeName((PetscObject)v,VECMPI);CHKERRQ(ierr);
  ierr = PetscPublishAll(v);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
212 
213 /*MC
214    VECMPI - VECMPI = "mpi" - The basic parallel vector
215 
216    Options Database Keys:
217 . -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions()
218 
219   Level: beginner
220 
.seealso: VecCreate(), VecSetType(), VecSetFromOptions(), VecCreateMPIWithArray(), VECMPI, VecType, VecCreateMPI()
222 M*/
223 
224 EXTERN_C_BEGIN
225 #undef __FUNCT__
226 #define __FUNCT__ "VecCreate_MPI"
227 PetscErrorCode PETSCVEC_DLLEXPORT VecCreate_MPI(Vec vv)
228 {
229   PetscErrorCode ierr;
230 
231   PetscFunctionBegin;
232   ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,0,0);CHKERRQ(ierr);
233   PetscFunctionReturn(0);
234 }
235 EXTERN_C_END
236 
237 #undef __FUNCT__
238 #define __FUNCT__ "VecCreateMPIWithArray"
239 /*@C
240    VecCreateMPIWithArray - Creates a parallel, array-style vector,
241    where the user provides the array space to store the vector values.
242 
243    Collective on MPI_Comm
244 
245    Input Parameters:
246 +  comm  - the MPI communicator to use
247 .  n     - local vector length, cannot be PETSC_DECIDE
248 .  N     - global vector length (or PETSC_DECIDE to have calculated)
249 -  array - the user provided array to store the vector values
250 
251    Output Parameter:
252 .  vv - the vector
253 
254    Notes:
255    Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
256    same type as an existing vector.
257 
258    If the user-provided array is PETSC_NULL, then VecPlaceArray() can be used
259    at a later stage to SET the array for storing the vector values.
260 
261    PETSc does NOT free the array when the vector is destroyed via VecDestroy().
262    The user should not free the array until the vector is destroyed.
263 
264    Level: intermediate
265 
266    Concepts: vectors^creating with array
267 
268 .seealso: VecCreateSeqWithArray(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(),
269           VecCreateMPI(), VecCreateGhostWithArray(), VecPlaceArray()
270 
271 @*/
272 PetscErrorCode PETSCVEC_DLLEXPORT VecCreateMPIWithArray(MPI_Comm comm,PetscInt n,PetscInt N,const PetscScalar array[],Vec *vv)
273 {
274   PetscErrorCode ierr;
275 
276   PetscFunctionBegin;
277   if (n == PETSC_DECIDE) {
278     SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
279   }
280   ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
281   ierr = VecCreate(comm,vv);CHKERRQ(ierr);
282   ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
283   ierr = VecCreate_MPI_Private(*vv,PETSC_FALSE,0,array);CHKERRQ(ierr);
284   PetscFunctionReturn(0);
285 }
286 
287 #undef __FUNCT__
288 #define __FUNCT__ "VecGhostGetLocalForm"
289 /*@
290     VecGhostGetLocalForm - Obtains the local ghosted representation of
291     a parallel vector created with VecCreateGhost().
292 
293     Not Collective
294 
295     Input Parameter:
296 .   g - the global vector. Vector must be have been obtained with either
297         VecCreateGhost(), VecCreateGhostWithArray() or VecCreateSeq().
298 
299     Output Parameter:
300 .   l - the local (ghosted) representation
301 
302     Notes:
303     This routine does not actually update the ghost values, but rather it
304     returns a sequential vector that includes the locations for the ghost
305     values and their current values. The returned vector and the original
306     vector passed in share the same array that contains the actual vector data.
307 
308     One should call VecGhostRestoreLocalForm() or VecDestroy() once one is
309     finished using the object.
310 
311     Level: advanced
312 
313    Concepts: vectors^ghost point access
314 
315 .seealso: VecCreateGhost(), VecGhostRestoreLocalForm(), VecCreateGhostWithArray()
316 
317 @*/
318 PetscErrorCode PETSCVEC_DLLEXPORT VecGhostGetLocalForm(Vec g,Vec *l)
319 {
320   PetscErrorCode ierr;
321   PetscTruth     isseq,ismpi;
322 
323   PetscFunctionBegin;
324   PetscValidHeaderSpecific(g,VEC_COOKIE,1);
325   PetscValidPointer(l,2);
326 
327   ierr = PetscTypeCompare((PetscObject)g,VECSEQ,&isseq);CHKERRQ(ierr);
328   ierr = PetscTypeCompare((PetscObject)g,VECMPI,&ismpi);CHKERRQ(ierr);
329   if (ismpi) {
330     Vec_MPI *v  = (Vec_MPI*)g->data;
331     if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
332     *l = v->localrep;
333   } else if (isseq) {
334     *l = g;
335   } else {
336     SETERRQ1(PETSC_ERR_ARG_WRONG,"Vector type %s does not have local representation",((PetscObject)g)->type_name);
337   }
338   ierr = PetscObjectReference((PetscObject)*l);CHKERRQ(ierr);
339   PetscFunctionReturn(0);
340 }
341 
342 #undef __FUNCT__
343 #define __FUNCT__ "VecGhostRestoreLocalForm"
344 /*@
345     VecGhostRestoreLocalForm - Restores the local ghosted representation of
346     a parallel vector obtained with VecGhostGetLocalForm().
347 
348     Not Collective
349 
350     Input Parameter:
351 +   g - the global vector
352 -   l - the local (ghosted) representation
353 
354     Notes:
355     This routine does not actually update the ghost values, but rather it
356     returns a sequential vector that includes the locations for the ghost values
357     and their current values.
358 
359     Level: advanced
360 
361 .seealso: VecCreateGhost(), VecGhostGetLocalForm(), VecCreateGhostWithArray()
362 @*/
363 PetscErrorCode PETSCVEC_DLLEXPORT VecGhostRestoreLocalForm(Vec g,Vec *l)
364 {
365   PetscFunctionBegin;
366   PetscObjectDereference((PetscObject)*l);
367   PetscFunctionReturn(0);
368 }
369 
370 #undef __FUNCT__
371 #define __FUNCT__ "VecGhostUpdateBegin"
372 /*@
373    VecGhostUpdateBegin - Begins the vector scatter to update the vector from
374    local representation to global or global representation to local.
375 
376    Collective on Vec
377 
378    Input Parameters:
379 +  g - the vector (obtained with VecCreateGhost() or VecDuplicate())
380 .  insertmode - one of ADD_VALUES or INSERT_VALUES
381 -  scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE
382 
383    Notes:
384    Use the following to update the ghost regions with correct values from the owning process
385 .vb
386        VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
387        VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
388 .ve
389 
390    Use the following to accumulate the ghost region values onto the owning processors
391 .vb
392        VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
393        VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
394 .ve
395 
396    To accumulate the ghost region values onto the owning processors and then update
397    the ghost regions correctly, call the later followed by the former, i.e.,
398 .vb
399        VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
400        VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
401        VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
402        VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
403 .ve
404 
405    Level: advanced
406 
407 .seealso: VecCreateGhost(), VecGhostUpdateEnd(), VecGhostGetLocalForm(),
408           VecGhostRestoreLocalForm(),VecCreateGhostWithArray()
409 
410 @*/
411 PetscErrorCode PETSCVEC_DLLEXPORT VecGhostUpdateBegin(Vec g,InsertMode insertmode,ScatterMode scattermode)
412 {
413   Vec_MPI        *v;
414   PetscErrorCode ierr;
415 
416   PetscFunctionBegin;
417   PetscValidHeaderSpecific(g,VEC_COOKIE,1);
418 
419   v  = (Vec_MPI*)g->data;
420   if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
421   if (!v->localupdate) PetscFunctionReturn(0);
422 
423   if (scattermode == SCATTER_REVERSE) {
424     ierr = VecScatterBegin(v->localupdate,v->localrep,g,insertmode,scattermode);CHKERRQ(ierr);
425   } else {
426     ierr = VecScatterBegin(v->localupdate,g,v->localrep,insertmode,scattermode);CHKERRQ(ierr);
427   }
428   PetscFunctionReturn(0);
429 }
430 
431 #undef __FUNCT__
432 #define __FUNCT__ "VecGhostUpdateEnd"
433 /*@
434    VecGhostUpdateEnd - End the vector scatter to update the vector from
435    local representation to global or global representation to local.
436 
437    Collective on Vec
438 
439    Input Parameters:
440 +  g - the vector (obtained with VecCreateGhost() or VecDuplicate())
441 .  insertmode - one of ADD_VALUES or INSERT_VALUES
442 -  scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE
443 
444    Notes:
445 
446    Use the following to update the ghost regions with correct values from the owning process
447 .vb
448        VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
449        VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
450 .ve
451 
452    Use the following to accumulate the ghost region values onto the owning processors
453 .vb
454        VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
455        VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
456 .ve
457 
458    To accumulate the ghost region values onto the owning processors and then update
459    the ghost regions correctly, call the later followed by the former, i.e.,
460 .vb
461        VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
462        VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
463        VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
464        VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
465 .ve
466 
467    Level: advanced
468 
469 .seealso: VecCreateGhost(), VecGhostUpdateBegin(), VecGhostGetLocalForm(),
470           VecGhostRestoreLocalForm(),VecCreateGhostWithArray()
471 
472 @*/
473 PetscErrorCode PETSCVEC_DLLEXPORT VecGhostUpdateEnd(Vec g,InsertMode insertmode,ScatterMode scattermode)
474 {
475   Vec_MPI        *v;
476   PetscErrorCode ierr;
477 
478   PetscFunctionBegin;
479   PetscValidHeaderSpecific(g,VEC_COOKIE,1);
480 
481   v  = (Vec_MPI*)g->data;
482   if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
483   if (!v->localupdate) PetscFunctionReturn(0);
484 
485   if (scattermode == SCATTER_REVERSE) {
486     ierr = VecScatterEnd(v->localupdate,v->localrep,g,insertmode,scattermode);CHKERRQ(ierr);
487   } else {
488     ierr = VecScatterEnd(v->localupdate,g,v->localrep,insertmode,scattermode);CHKERRQ(ierr);
489   }
490   PetscFunctionReturn(0);
491 }
492 
493 #undef __FUNCT__
494 #define __FUNCT__ "VecCreateGhostWithArray"
495 /*@C
496    VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
497    the caller allocates the array space.
498 
499    Collective on MPI_Comm
500 
501    Input Parameters:
502 +  comm - the MPI communicator to use
503 .  n - local vector length
504 .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
505 .  nghost - number of local ghost points
506 .  ghosts - global indices of ghost points (or PETSC_NULL if not needed)
507 -  array - the space to store the vector values (as long as n + nghost)
508 
509    Output Parameter:
510 .  vv - the global vector representation (without ghost points as part of vector)
511 
512    Notes:
513    Use VecGhostGetLocalForm() to access the local, ghosted representation
514    of the vector.
515 
516    This also automatically sets the ISLocalToGlobalMapping() for this vector.
517 
518    Level: advanced
519 
520    Concepts: vectors^creating with array
521    Concepts: vectors^ghosted
522 
523 .seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
524           VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
525           VecCreateGhostBlock(), VecCreateGhostBlockWithArray()
526 
527 @*/
528 PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostWithArray(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
529 {
530   PetscErrorCode         ierr;
531   Vec_MPI                *w;
532   PetscScalar            *larray;
533   IS                     from,to;
534   ISLocalToGlobalMapping ltog;
535   PetscInt               rstart,i,*indices;
536 
537   PetscFunctionBegin;
538   *vv = 0;
539 
540   if (n == PETSC_DECIDE)      SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
541   if (nghost == PETSC_DECIDE) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
542   if (nghost < 0)             SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
543   ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
544   /* Create global representation */
545   ierr = VecCreate(comm,vv);CHKERRQ(ierr);
546   ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
547   ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost,array);CHKERRQ(ierr);
548   w    = (Vec_MPI *)(*vv)->data;
549   /* Create local representation */
550   ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
551   ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
552   ierr = PetscLogObjectParent(*vv,w->localrep);CHKERRQ(ierr);
553   ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);
554 
555   /*
556        Create scatter context for scattering (updating) ghost values
557   */
558   ierr = ISCreateGeneral(comm,nghost,ghosts,&from);CHKERRQ(ierr);
559   ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
560   ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
561   ierr = PetscLogObjectParent(*vv,w->localupdate);CHKERRQ(ierr);
562   ierr = ISDestroy(to);CHKERRQ(ierr);
563   ierr = ISDestroy(from);CHKERRQ(ierr);
564 
565   /* set local to global mapping for ghosted vector */
566   ierr = PetscMalloc((n+nghost)*sizeof(PetscInt),&indices);CHKERRQ(ierr);
567   ierr = VecGetOwnershipRange(*vv,&rstart,PETSC_NULL);CHKERRQ(ierr);
568   for (i=0; i<n; i++) {
569     indices[i] = rstart + i;
570   }
571   for (i=0; i<nghost; i++) {
572     indices[n+i] = ghosts[i];
573   }
574   ierr = ISLocalToGlobalMappingCreate(comm,n+nghost,indices,&ltog);CHKERRQ(ierr);
575   ierr = PetscFree(indices);CHKERRQ(ierr);
576   ierr = VecSetLocalToGlobalMapping(*vv,ltog);CHKERRQ(ierr);
577   ierr = ISLocalToGlobalMappingDestroy(ltog);CHKERRQ(ierr);
578   ierr = PetscFree(indices);CHKERRQ(ierr);
579   PetscFunctionReturn(0);
580 }
581 
582 #undef __FUNCT__
583 #define __FUNCT__ "VecCreateGhost"
584 /*@
585    VecCreateGhost - Creates a parallel vector with ghost padding on each processor.
586 
587    Collective on MPI_Comm
588 
589    Input Parameters:
590 +  comm - the MPI communicator to use
591 .  n - local vector length
592 .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
593 .  nghost - number of local ghost points
594 -  ghosts - global indices of ghost points
595 
596    Output Parameter:
597 .  vv - the global vector representation (without ghost points as part of vector)
598 
599    Notes:
600    Use VecGhostGetLocalForm() to access the local, ghosted representation
601    of the vector.
602 
603    This also automatically sets the ISLocalToGlobalMapping() for this vector.
604 
605    Level: advanced
606 
607    Concepts: vectors^ghosted
608 
609 .seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
610           VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
611           VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
612           VecCreateGhostBlock(), VecCreateGhostBlockWithArray()
613 
614 @*/
615 PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhost(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
616 {
617   PetscErrorCode ierr;
618 
619   PetscFunctionBegin;
620   ierr = VecCreateGhostWithArray(comm,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
621   PetscFunctionReturn(0);
622 }
623 
#undef __FUNCT__
#define __FUNCT__ "VecDuplicate_MPI"
/*
   VecDuplicate_MPI - Creates a new MPI vector with the same layout, ops
   table, ghosting, stash options and composed data as win. Values are
   NOT copied (the new vector's array is freshly allocated).
*/
PetscErrorCode VecDuplicate_MPI(Vec win,Vec *v)
{
  PetscErrorCode ierr;
  Vec_MPI        *vw,*w = (Vec_MPI *)win->data;
  PetscScalar    *array;

  PetscFunctionBegin;
  ierr = VecCreate(((PetscObject)win)->comm,v);CHKERRQ(ierr);

  /* use the map that exists already in win (shared by reference count) */
  ierr = PetscMapDestroy((*v)->map);CHKERRQ(ierr);
  (*v)->map = win->map;
  win->map->refcnt++;

  ierr = VecCreate_MPI_Private(*v,PETSC_TRUE,w->nghost,0);CHKERRQ(ierr);
  vw   = (Vec_MPI *)(*v)->data;
  /* copy win's ops table (it may have been customized after creation) */
  ierr = PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps));CHKERRQ(ierr);

  /* save local representation of the parallel vector (and scatter) if it exists */
  if (w->localrep) {
    ierr = VecGetArray(*v,&array);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,win->map->n+w->nghost,array,&vw->localrep);CHKERRQ(ierr);
    ierr = PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps));CHKERRQ(ierr);
    ierr = VecRestoreArray(*v,&array);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(*v,vw->localrep);CHKERRQ(ierr);
    /* the scatter context is shared with win, not duplicated */
    vw->localupdate = w->localupdate;
    if (vw->localupdate) {
      ierr = PetscObjectReference((PetscObject)vw->localupdate);CHKERRQ(ierr);
    }
  }

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash = win->stash.donotstash;
  (*v)->stash.ignorenegidx = win->stash.ignorenegidx;

  /* carry over composed objects/functions and the local-to-global mappings */
  ierr = PetscOListDuplicate(((PetscObject)win)->olist,&((PetscObject)(*v))->olist);CHKERRQ(ierr);
  ierr = PetscFListDuplicate(((PetscObject)win)->qlist,&((PetscObject)(*v))->qlist);CHKERRQ(ierr);
  if (win->mapping) {
    ierr = PetscObjectReference((PetscObject)win->mapping);CHKERRQ(ierr);
    (*v)->mapping = win->mapping;
  }
  if (win->bmapping) {
    ierr = PetscObjectReference((PetscObject)win->bmapping);CHKERRQ(ierr);
    (*v)->bmapping = win->bmapping;
  }
  (*v)->map->bs    = win->map->bs;
  (*v)->bstash.bs = win->bstash.bs;

  PetscFunctionReturn(0);
}
676 
677 /* ------------------------------------------------------------------------------------------*/
678 #undef __FUNCT__
679 #define __FUNCT__ "VecCreateGhostBlockWithArray"
680 /*@C
681    VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
682    the caller allocates the array space. Indices in the ghost region are based on blocks.
683 
684    Collective on MPI_Comm
685 
686    Input Parameters:
687 +  comm - the MPI communicator to use
688 .  bs - block size
689 .  n - local vector length
690 .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
691 .  nghost - number of local ghost blocks
692 .  ghosts - global indices of ghost blocks (or PETSC_NULL if not needed)
693 -  array - the space to store the vector values (as long as n + nghost*bs)
694 
695    Output Parameter:
696 .  vv - the global vector representation (without ghost points as part of vector)
697 
698    Notes:
699    Use VecGhostGetLocalForm() to access the local, ghosted representation
700    of the vector.
701 
702    n is the local vector size (total local size not the number of blocks) while nghost
703    is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
704    portion is bs*nghost
705 
706    Level: advanced
707 
708    Concepts: vectors^creating ghosted
709    Concepts: vectors^creating with array
710 
711 .seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
712           VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
713           VecCreateGhostWithArray(), VecCreateGhostBlocked()
714 
715 @*/
716 PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostBlockWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
717 {
718   PetscErrorCode ierr;
719   Vec_MPI        *w;
720   PetscScalar    *larray;
721   IS             from,to;
722   ISLocalToGlobalMapping ltog;
723   PetscInt       rstart,i,nb,*indices;
724 
725   PetscFunctionBegin;
726   *vv = 0;
727 
728   if (n == PETSC_DECIDE)      SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
729   if (nghost == PETSC_DECIDE) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
730   if (nghost < 0)             SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
731   if (n % bs)                 SETERRQ(PETSC_ERR_ARG_INCOMP,"Local size must be a multiple of block size");
732   ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
733   /* Create global representation */
734   ierr = VecCreate(comm,vv);CHKERRQ(ierr);
735   ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
736   ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost*bs,array);CHKERRQ(ierr);
737   ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
738   w    = (Vec_MPI *)(*vv)->data;
739   /* Create local representation */
740   ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
741   ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,n+bs*nghost,larray,&w->localrep);CHKERRQ(ierr);
742   ierr = VecSetBlockSize(w->localrep,bs);CHKERRQ(ierr);
743   ierr = PetscLogObjectParent(*vv,w->localrep);CHKERRQ(ierr);
744   ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);
745 
746   /*
747        Create scatter context for scattering (updating) ghost values
748   */
749   ierr = ISCreateBlock(comm,bs,nghost,ghosts,&from);CHKERRQ(ierr);
750   ierr = ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);CHKERRQ(ierr);
751   ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
752   ierr = PetscLogObjectParent(*vv,w->localupdate);CHKERRQ(ierr);
753   ierr = ISDestroy(to);CHKERRQ(ierr);
754   ierr = ISDestroy(from);CHKERRQ(ierr);
755 
756   /* set local to global mapping for ghosted vector */
757   nb = n/bs;
758   ierr = PetscMalloc((nb+nghost)*sizeof(PetscInt),&indices);CHKERRQ(ierr);
759   ierr = VecGetOwnershipRange(*vv,&rstart,PETSC_NULL);CHKERRQ(ierr);
760   for (i=0; i<nb; i++) {
761     indices[i] = rstart + i*bs;
762   }
763   for (i=0; i<nghost; i++) {
764     indices[nb+i] = ghosts[i];
765   }
766   ierr = ISLocalToGlobalMappingCreate(comm,nb+nghost,indices,&ltog);CHKERRQ(ierr);
767   ierr = PetscFree(indices);CHKERRQ(ierr);
768   ierr = VecSetLocalToGlobalMappingBlock(*vv,ltog);CHKERRQ(ierr);
769   ierr = ISLocalToGlobalMappingDestroy(ltog);CHKERRQ(ierr);
770   ierr = PetscFree(indices);CHKERRQ(ierr);
771 
772   PetscFunctionReturn(0);
773 }
774 
775 #undef __FUNCT__
776 #define __FUNCT__ "VecCreateGhostBlock"
777 /*@
778    VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
779         The indicing of the ghost points is done with blocks.
780 
781    Collective on MPI_Comm
782 
783    Input Parameters:
784 +  comm - the MPI communicator to use
785 .  bs - the block size
786 .  n - local vector length
787 .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
788 .  nghost - number of local ghost blocks
789 -  ghosts - global indices of ghost blocks
790 
791    Output Parameter:
792 .  vv - the global vector representation (without ghost points as part of vector)
793 
794    Notes:
795    Use VecGhostGetLocalForm() to access the local, ghosted representation
796    of the vector.
797 
798    n is the local vector size (total local size not the number of blocks) while nghost
799    is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
800    portion is bs*nghost
801 
802    Level: advanced
803 
804    Concepts: vectors^ghosted
805 
806 .seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
807           VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
808           VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecCreateGhostBlockWithArray()
809 
810 @*/
811 PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostBlock(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
812 {
813   PetscErrorCode ierr;
814 
815   PetscFunctionBegin;
816   ierr = VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
817   PetscFunctionReturn(0);
818 }
819 
820 /*
821     These introduce a ghosted vector where the ghosting is determined by the call to
822   VecSetLocalToGlobalMapping()
823 */
824 
#undef __FUNCT__
#define __FUNCT__ "VecSetLocalToGlobalMapping_FETI"
/*
   VecSetLocalToGlobalMapping_FETI - Enlarges the vector's data array to
   map->n entries and builds a local (ghosted) representation over it;
   the ghost count is inferred from the mapping size.

   NOTE(review): the old array is freed and a new one allocated without
   copying, so any values already stored in the vector are discarded —
   presumably the mapping is set before any values; confirm with callers.
   NOTE(review): unlike VecCreateGhostWithArray(), no localupdate scatter
   is created here, so VecGhostUpdateBegin/End() become no-ops — verify
   this is intended for the FETI type.
*/
PetscErrorCode VecSetLocalToGlobalMapping_FETI(Vec vv,ISLocalToGlobalMapping map)
{
  PetscErrorCode ierr;
  Vec_MPI        *v = (Vec_MPI *)vv->data;

  PetscFunctionBegin;
  v->nghost = map->n - vv->map->n;

  /* we need to make longer the array space that was allocated when the vector was created */
  ierr     = PetscFree(v->array_allocated);CHKERRQ(ierr);
  ierr     = PetscMalloc(map->n*sizeof(PetscScalar),&v->array_allocated);CHKERRQ(ierr);
  v->array = v->array_allocated;

  /* Create local representation */
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,map->n,v->array,&v->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(vv,v->localrep);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
845 
846 
847 #undef __FUNCT__
848 #define __FUNCT__ "VecSetValuesLocal_FETI"
849 PetscErrorCode VecSetValuesLocal_FETI(Vec vv,PetscInt n,const PetscInt *ix,const PetscScalar *values,InsertMode mode)
850 {
851   PetscErrorCode ierr;
852   Vec_MPI        *v = (Vec_MPI *)vv->data;
853 
854   PetscFunctionBegin;
855   ierr = VecSetValues(v->localrep,n,ix,values,mode);CHKERRQ(ierr);
856   PetscFunctionReturn(0);
857 }
858 
859 EXTERN_C_BEGIN
860 #undef __FUNCT__
861 #define __FUNCT__ "VecCreate_FETI"
862 PetscErrorCode PETSCVEC_DLLEXPORT VecCreate_FETI(Vec vv)
863 {
864   PetscErrorCode ierr;
865 
866   PetscFunctionBegin;
867   ierr = VecSetType(vv,VECMPI);CHKERRQ(ierr);
868 
869   /* overwrite the functions to handle setting values locally */
870   vv->ops->setlocaltoglobalmapping = VecSetLocalToGlobalMapping_FETI;
871   vv->ops->setvalueslocal          = VecSetValuesLocal_FETI;
872   vv->ops->assemblybegin           = 0;
873   vv->ops->assemblyend             = 0;
874   vv->ops->setvaluesblocked        = 0;
875   vv->ops->setvaluesblocked        = 0;
876 
877   PetscFunctionReturn(0);
878 }
879 EXTERN_C_END
880 
881 
882 
883 
884 
885 
886 
887 
888 
889