xref: /petsc/src/vec/vec/impls/mpi/pbvec.c (revision 1f428162ad0d3f3f8d1cec0ccb02b2fdaac4d2ec)
1 #define PETSCVEC_DLL
2 /*
3    This file contains routines for Parallel vector operations.
4  */
5 #include "src/vec/vec/impls/mpi/pvecimpl.h"   /*I  "petscvec.h"   I*/
6 
/*
       Note this code is very similar to VecPublish_Seq()
*/
#undef __FUNCT__
#define __FUNCT__ "VecPublish_MPI"
/*
   VecPublish_MPI - Publish hook installed on MPI vectors (see
   VecCreate_MPI_Private() where it is assigned to v->bops->publish).
   Currently a no-op: it accepts the object and returns success.
*/
static PetscErrorCode VecPublish_MPI(PetscObject obj)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
17 
18 #undef __FUNCT__
19 #define __FUNCT__ "VecDot_MPI"
20 PetscErrorCode VecDot_MPI(Vec xin,Vec yin,PetscScalar *z)
21 {
22   PetscScalar    sum,work;
23   PetscErrorCode ierr;
24 
25   PetscFunctionBegin;
26   ierr = VecDot_Seq(xin,yin,&work);CHKERRQ(ierr);
27   ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,PetscSum_Op,xin->comm);CHKERRQ(ierr);
28   *z = sum;
29   PetscFunctionReturn(0);
30 }
31 
32 #undef __FUNCT__
33 #define __FUNCT__ "VecTDot_MPI"
34 PetscErrorCode VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z)
35 {
36   PetscScalar    sum,work;
37   PetscErrorCode ierr;
38 
39   PetscFunctionBegin;
40   ierr = VecTDot_Seq(xin,yin,&work);CHKERRQ(ierr);
41   ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,PetscSum_Op,xin->comm);CHKERRQ(ierr);
42   *z   = sum;
43   PetscFunctionReturn(0);
44 }
45 
46 #undef __FUNCT__
47 #define __FUNCT__ "VecSetOption_MPI"
48 PetscErrorCode VecSetOption_MPI(Vec v,VecOption op)
49 {
50   PetscFunctionBegin;
51   if (op == VEC_IGNORE_OFF_PROC_ENTRIES) {
52     v->stash.donotstash = PETSC_TRUE;
53   } else if (op == VEC_TREAT_OFF_PROC_ENTRIES) {
54     v->stash.donotstash = PETSC_FALSE;
55   }
56   PetscFunctionReturn(0);
57 }
58 
59 EXTERN PetscErrorCode VecDuplicate_MPI(Vec,Vec *);
60 EXTERN_C_BEGIN
61 EXTERN PetscErrorCode VecView_MPI_Draw(Vec,PetscViewer);
62 EXTERN_C_END
63 
64 #undef __FUNCT__
65 #define __FUNCT__ "VecPlaceArray_MPI"
66 PetscErrorCode VecPlaceArray_MPI(Vec vin,const PetscScalar *a)
67 {
68   PetscErrorCode ierr;
69   Vec_MPI        *v = (Vec_MPI *)vin->data;
70 
71   PetscFunctionBegin;
72   if (v->unplacedarray) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
73   v->unplacedarray = v->array;  /* save previous array so reset can bring it back */
74   v->array = (PetscScalar *)a;
75   if (v->localrep) {
76     ierr = VecPlaceArray(v->localrep,a);CHKERRQ(ierr);
77   }
78   PetscFunctionReturn(0);
79 }
80 
81 EXTERN PetscErrorCode VecLoad_Binary(PetscViewer, VecType, Vec*);
82 EXTERN PetscErrorCode VecGetValues_MPI(Vec,PetscInt,const PetscInt [],PetscScalar []);
83 
/*
   Function table for the VECMPI implementation.  Operations that touch only
   locally owned data reuse the sequential (_Seq) kernels; operations that
   require communication (dot products, norms, max/min, assembly, view,
   duplication) use the MPI-specific versions defined in this directory.
   The entry order must match the declaration of struct _VecOps exactly;
   zero entries are operations this type does not provide.
*/
static struct _VecOps DvOps = { VecDuplicate_MPI,
            VecDuplicateVecs_Default,
            VecDestroyVecs_Default,
            VecDot_MPI,
            VecMDot_MPI,
            VecNorm_MPI,
            VecTDot_MPI,
            VecMTDot_MPI,
            VecScale_Seq,
            VecCopy_Seq,
            VecSet_Seq,
            VecSwap_Seq,
            VecAXPY_Seq,
            VecAXPBY_Seq,
            VecMAXPY_Seq,
            VecAYPX_Seq,
            VecWAXPY_Seq,
            VecPointwiseMult_Seq,
            VecPointwiseDivide_Seq,
            VecSetValues_MPI,
            VecAssemblyBegin_MPI,
            VecAssemblyEnd_MPI,
            VecGetArray_Seq,
            VecGetSize_MPI,
            VecGetSize_Seq,       /* getlocalsize slot */
            VecRestoreArray_Seq,
            VecMax_MPI,
            VecMin_MPI,
            VecSetRandom_Seq,
            VecSetOption_MPI,
            VecSetValuesBlocked_MPI,
            VecDestroy_MPI,
            VecView_MPI,
            VecPlaceArray_MPI,
            VecReplaceArray_Seq,
            VecDot_Seq,           /* "local" variants used by norm/dot splitting */
            VecTDot_Seq,
            VecNorm_Seq,
            VecMDot_Seq,
            VecMTDot_Seq,
            VecLoadIntoVector_Default,
            VecReciprocal_Default,
            0, /* VecViewNative... */
            VecConjugate_Seq,
            0,
            0,
            VecResetArray_Seq,
            0,
            VecMaxPointwiseDivide_Seq,
            VecLoad_Binary,
            VecPointwiseMax_Seq,
            VecPointwiseMaxAbs_Seq,
            VecPointwiseMin_Seq,
            VecGetValues_MPI};
138 
#undef __FUNCT__
#define __FUNCT__ "VecCreate_MPI_Private"
/*
    VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
    VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
    VecDuplicate_MPI(), VecCreateGhostWithArray(), VecDuplicate_MPI(), and VecDuplicate_Shared()

    nghost - number of ghost entries appended after the owned part of the array
    array  - optional user-provided storage; when null PETSc allocates (and owns)
             a buffer of v->map.n+nghost entries
*/
PetscErrorCode VecCreate_MPI_Private(Vec v,PetscInt nghost,const PetscScalar array[])
{
  Vec_MPI        *s;
  PetscErrorCode ierr;

  PetscFunctionBegin;

  v->bops->publish   = VecPublish_MPI;
  ierr = PetscLogObjectMemory(v,sizeof(Vec_MPI) + (v->map.n+nghost+1)*sizeof(PetscScalar));CHKERRQ(ierr);
  /* allocate the implementation struct and install the MPI function table */
  ierr           = PetscNew(Vec_MPI,&s);CHKERRQ(ierr);
  ierr           = PetscMemcpy(v->ops,&DvOps,sizeof(DvOps));CHKERRQ(ierr);
  v->data        = (void*)s;
  s->nghost      = nghost;
  v->mapping     = 0;
  v->bmapping    = 0;
  v->petscnative = PETSC_TRUE;

  /* default block size is 1; compute ownership ranges from the sizes */
  if (v->map.bs == -1) v->map.bs = 1;
  ierr = PetscMapInitialize(v->comm,&v->map);CHKERRQ(ierr);
  if (array) {
    /* user-supplied storage: array_allocated stays 0 so PETSc never frees it */
    s->array           = (PetscScalar *)array;
    s->array_allocated = 0;
  } else {
    PetscInt n         = v->map.n+nghost;
    ierr               = PetscMalloc(n*sizeof(PetscScalar),&s->array);CHKERRQ(ierr);
    s->array_allocated = s->array;
    /* only the owned (non-ghost) part is zeroed here */
    ierr               = PetscMemzero(s->array,v->map.n*sizeof(PetscScalar));CHKERRQ(ierr);
  }

  /* By default parallel vectors do not have local representation */
  s->localrep    = 0;
  s->localupdate = 0;

  v->stash.insertmode  = NOT_SET_VALUES;
  /* create the stashes. The block-size for bstash is set later when
     VecSetValuesBlocked is called.
  */
  ierr = VecStashCreate_Private(v->comm,1,&v->stash);CHKERRQ(ierr);
  ierr = VecStashCreate_Private(v->comm,v->map.bs,&v->bstash);CHKERRQ(ierr);

#if defined(PETSC_HAVE_MATLAB)
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEnginePut_C","VecMatlabEnginePut_Default",VecMatlabEnginePut_Default);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEngineGet_C","VecMatlabEngineGet_Default",VecMatlabEngineGet_Default);CHKERRQ(ierr);
#endif
  ierr = PetscObjectChangeTypeName((PetscObject)v,VECMPI);CHKERRQ(ierr);
  ierr = PetscPublishAll(v);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
194 
195 /*MC
196    VECMPI - VECMPI = "mpi" - The basic parallel vector
197 
198    Options Database Keys:
199 . -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions()
200 
201   Level: beginner
202 
.seealso: VecCreate(), VecSetType(), VecSetFromOptions(), VecCreateMPIWithArray(), VECMPI, VecType, VecCreateMPI()
204 M*/
205 
206 EXTERN_C_BEGIN
207 #undef __FUNCT__
208 #define __FUNCT__ "VecCreate_MPI"
209 PetscErrorCode PETSCVEC_DLLEXPORT VecCreate_MPI(Vec vv)
210 {
211   PetscErrorCode ierr;
212 
213   PetscFunctionBegin;
214   ierr = VecCreate_MPI_Private(vv,0,0);CHKERRQ(ierr);
215   PetscFunctionReturn(0);
216 }
217 EXTERN_C_END
218 
219 #undef __FUNCT__
220 #define __FUNCT__ "VecCreateMPIWithArray"
221 /*@C
222    VecCreateMPIWithArray - Creates a parallel, array-style vector,
223    where the user provides the array space to store the vector values.
224 
225    Collective on MPI_Comm
226 
227    Input Parameters:
228 +  comm  - the MPI communicator to use
229 .  n     - local vector length, cannot be PETSC_DECIDE
230 .  N     - global vector length (or PETSC_DECIDE to have calculated)
231 -  array - the user provided array to store the vector values
232 
233    Output Parameter:
234 .  vv - the vector
235 
236    Notes:
237    Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
238    same type as an existing vector.
239 
240    If the user-provided array is PETSC_NULL, then VecPlaceArray() can be used
241    at a later stage to SET the array for storing the vector values.
242 
243    PETSc does NOT free the array when the vector is destroyed via VecDestroy().
244    The user should not free the array until the vector is destroyed.
245 
246    Level: intermediate
247 
248    Concepts: vectors^creating with array
249 
250 .seealso: VecCreateSeqWithArray(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(),
251           VecCreateMPI(), VecCreateGhostWithArray(), VecPlaceArray()
252 
253 @*/
254 PetscErrorCode PETSCVEC_DLLEXPORT VecCreateMPIWithArray(MPI_Comm comm,PetscInt n,PetscInt N,const PetscScalar array[],Vec *vv)
255 {
256   PetscErrorCode ierr;
257 
258   PetscFunctionBegin;
259   if (n == PETSC_DECIDE) {
260     SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
261   }
262   ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
263   ierr = VecCreate(comm,vv);CHKERRQ(ierr);
264   ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
265   ierr = VecCreate_MPI_Private(*vv,0,array);CHKERRQ(ierr);
266   PetscFunctionReturn(0);
267 }
268 
269 #undef __FUNCT__
270 #define __FUNCT__ "VecGhostGetLocalForm"
271 /*@
272     VecGhostGetLocalForm - Obtains the local ghosted representation of
273     a parallel vector created with VecCreateGhost().
274 
275     Not Collective
276 
277     Input Parameter:
278 .   g - the global vector. Vector must be have been obtained with either
279         VecCreateGhost(), VecCreateGhostWithArray() or VecCreateSeq().
280 
281     Output Parameter:
282 .   l - the local (ghosted) representation
283 
284     Notes:
285     This routine does not actually update the ghost values, but rather it
286     returns a sequential vector that includes the locations for the ghost
287     values and their current values. The returned vector and the original
288     vector passed in share the same array that contains the actual vector data.
289 
290     One should call VecGhostRestoreLocalForm() or VecDestroy() once one is
291     finished using the object.
292 
293     Level: advanced
294 
295    Concepts: vectors^ghost point access
296 
297 .seealso: VecCreateGhost(), VecGhostRestoreLocalForm(), VecCreateGhostWithArray()
298 
299 @*/
300 PetscErrorCode PETSCVEC_DLLEXPORT VecGhostGetLocalForm(Vec g,Vec *l)
301 {
302   PetscErrorCode ierr;
303   PetscTruth     isseq,ismpi;
304 
305   PetscFunctionBegin;
306   PetscValidHeaderSpecific(g,VEC_COOKIE,1);
307   PetscValidPointer(l,2);
308 
309   ierr = PetscTypeCompare((PetscObject)g,VECSEQ,&isseq);CHKERRQ(ierr);
310   ierr = PetscTypeCompare((PetscObject)g,VECMPI,&ismpi);CHKERRQ(ierr);
311   if (ismpi) {
312     Vec_MPI *v  = (Vec_MPI*)g->data;
313     if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
314     *l = v->localrep;
315   } else if (isseq) {
316     *l = g;
317   } else {
318     SETERRQ1(PETSC_ERR_ARG_WRONG,"Vector type %s does not have local representation",g->type_name);
319   }
320   ierr = PetscObjectReference((PetscObject)*l);CHKERRQ(ierr);
321   PetscFunctionReturn(0);
322 }
323 
324 #undef __FUNCT__
325 #define __FUNCT__ "VecGhostRestoreLocalForm"
326 /*@
327     VecGhostRestoreLocalForm - Restores the local ghosted representation of
328     a parallel vector obtained with VecGhostGetLocalForm().
329 
330     Not Collective
331 
332     Input Parameter:
333 +   g - the global vector
334 -   l - the local (ghosted) representation
335 
336     Notes:
337     This routine does not actually update the ghost values, but rather it
338     returns a sequential vector that includes the locations for the ghost values
339     and their current values.
340 
341     Level: advanced
342 
343 .seealso: VecCreateGhost(), VecGhostGetLocalForm(), VecCreateGhostWithArray()
344 @*/
345 PetscErrorCode PETSCVEC_DLLEXPORT VecGhostRestoreLocalForm(Vec g,Vec *l)
346 {
347   PetscFunctionBegin;
348   PetscObjectDereference((PetscObject)*l);
349   PetscFunctionReturn(0);
350 }
351 
352 #undef __FUNCT__
353 #define __FUNCT__ "VecGhostUpdateBegin"
354 /*@
355    VecGhostUpdateBegin - Begins the vector scatter to update the vector from
356    local representation to global or global representation to local.
357 
358    Collective on Vec
359 
360    Input Parameters:
361 +  g - the vector (obtained with VecCreateGhost() or VecDuplicate())
362 .  insertmode - one of ADD_VALUES or INSERT_VALUES
363 -  scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE
364 
365    Notes:
366    Use the following to update the ghost regions with correct values from the owning process
367 .vb
368        VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
369        VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
370 .ve
371 
372    Use the following to accumulate the ghost region values onto the owning processors
373 .vb
374        VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
375        VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
376 .ve
377 
378    To accumulate the ghost region values onto the owning processors and then update
379    the ghost regions correctly, call the later followed by the former, i.e.,
380 .vb
381        VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
382        VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
383        VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
384        VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
385 .ve
386 
387    Level: advanced
388 
389 .seealso: VecCreateGhost(), VecGhostUpdateEnd(), VecGhostGetLocalForm(),
390           VecGhostRestoreLocalForm(),VecCreateGhostWithArray()
391 
392 @*/
393 PetscErrorCode PETSCVEC_DLLEXPORT VecGhostUpdateBegin(Vec g,InsertMode insertmode,ScatterMode scattermode)
394 {
395   Vec_MPI        *v;
396   PetscErrorCode ierr;
397 
398   PetscFunctionBegin;
399   PetscValidHeaderSpecific(g,VEC_COOKIE,1);
400 
401   v  = (Vec_MPI*)g->data;
402   if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
403   if (!v->localupdate) PetscFunctionReturn(0);
404 
405   if (scattermode == SCATTER_REVERSE) {
406     ierr = VecScatterBegin(v->localrep,g,insertmode,scattermode,v->localupdate);CHKERRQ(ierr);
407   } else {
408     ierr = VecScatterBegin(g,v->localrep,insertmode,scattermode,v->localupdate);CHKERRQ(ierr);
409   }
410   PetscFunctionReturn(0);
411 }
412 
413 #undef __FUNCT__
414 #define __FUNCT__ "VecGhostUpdateEnd"
415 /*@
416    VecGhostUpdateEnd - End the vector scatter to update the vector from
417    local representation to global or global representation to local.
418 
419    Collective on Vec
420 
421    Input Parameters:
422 +  g - the vector (obtained with VecCreateGhost() or VecDuplicate())
423 .  insertmode - one of ADD_VALUES or INSERT_VALUES
424 -  scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE
425 
426    Notes:
427 
428    Use the following to update the ghost regions with correct values from the owning process
429 .vb
430        VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
431        VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
432 .ve
433 
434    Use the following to accumulate the ghost region values onto the owning processors
435 .vb
436        VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
437        VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
438 .ve
439 
440    To accumulate the ghost region values onto the owning processors and then update
441    the ghost regions correctly, call the later followed by the former, i.e.,
442 .vb
443        VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
444        VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
445        VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
446        VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
447 .ve
448 
449    Level: advanced
450 
451 .seealso: VecCreateGhost(), VecGhostUpdateBegin(), VecGhostGetLocalForm(),
452           VecGhostRestoreLocalForm(),VecCreateGhostWithArray()
453 
454 @*/
455 PetscErrorCode PETSCVEC_DLLEXPORT VecGhostUpdateEnd(Vec g,InsertMode insertmode,ScatterMode scattermode)
456 {
457   Vec_MPI        *v;
458   PetscErrorCode ierr;
459 
460   PetscFunctionBegin;
461   PetscValidHeaderSpecific(g,VEC_COOKIE,1);
462 
463   v  = (Vec_MPI*)g->data;
464   if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
465   if (!v->localupdate) PetscFunctionReturn(0);
466 
467   if (scattermode == SCATTER_REVERSE) {
468     ierr = VecScatterEnd(v->localrep,g,insertmode,scattermode,v->localupdate);CHKERRQ(ierr);
469   } else {
470     ierr = VecScatterEnd(g,v->localrep,insertmode,scattermode,v->localupdate);CHKERRQ(ierr);
471   }
472   PetscFunctionReturn(0);
473 }
474 
#undef __FUNCT__
#define __FUNCT__ "VecCreateGhostWithArray"
/*@C
   VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
.  nghost - number of local ghost points
.  ghosts - global indices of ghost points (or PETSC_NULL if not needed)
-  array - the space to store the vector values (as long as n + nghost)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   Level: advanced

   Concepts: vectors^creating with array
   Concepts: vectors^ghosted

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostWithArray(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode ierr;
  Vec_MPI        *w;
  PetscScalar    *larray;
  IS             from,to;

  PetscFunctionBegin;
  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,nghost,array);CHKERRQ(ierr);
  w    = (Vec_MPI *)(*vv)->data;
  /* Create local representation: a sequential vector of length n+nghost that
     shares the global vector's array, so no copying is ever needed */
  ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localrep);CHKERRQ(ierr);
  ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values:
       global entries listed in ghosts[] map onto the contiguous slots
       starting at offset n in the local representation
  */
  ierr = ISCreateGeneral(comm,nghost,ghosts,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localupdate);CHKERRQ(ierr);
  ierr = ISDestroy(to);CHKERRQ(ierr);
  ierr = ISDestroy(from);CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
545 
546 #undef __FUNCT__
547 #define __FUNCT__ "VecCreateGhost"
548 /*@
549    VecCreateGhost - Creates a parallel vector with ghost padding on each processor.
550 
551    Collective on MPI_Comm
552 
553    Input Parameters:
554 +  comm - the MPI communicator to use
555 .  n - local vector length
556 .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
557 .  nghost - number of local ghost points
558 -  ghosts - global indices of ghost points
559 
560    Output Parameter:
561 .  vv - the global vector representation (without ghost points as part of vector)
562 
563    Notes:
564    Use VecGhostGetLocalForm() to access the local, ghosted representation
565    of the vector.
566 
567    Level: advanced
568 
569    Concepts: vectors^ghosted
570 
571 .seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
572           VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
573           VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
574           VecCreateGhostBlock(), VecCreateGhostBlockWithArray()
575 
576 @*/
577 PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhost(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
578 {
579   PetscErrorCode ierr;
580 
581   PetscFunctionBegin;
582   ierr = VecCreateGhostWithArray(comm,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
583   PetscFunctionReturn(0);
584 }
585 
#undef __FUNCT__
#define __FUNCT__ "VecDuplicate_MPI"
/*
   VecDuplicate_MPI - Creates a new parallel vector of the same layout and
   type as win.  Values are NOT copied.  Ghosting (local representation and
   scatter), the ops table, the stashing flag, composed objects/functions,
   mappings, and block sizes are all carried over to the new vector.
*/
PetscErrorCode VecDuplicate_MPI(Vec win,Vec *v)
{
  PetscErrorCode ierr;
  Vec_MPI        *vw,*w = (Vec_MPI *)win->data;
  PetscScalar    *array;

  PetscFunctionBegin;
  ierr = VecCreate(win->comm,v);CHKERRQ(ierr);
  ierr = VecSetSizes(*v,win->map.n,win->map.N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*v,w->nghost,0);CHKERRQ(ierr);
  vw   = (Vec_MPI *)(*v)->data;
  /* copy the ops table so any overrides on win are inherited */
  ierr = PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps));CHKERRQ(ierr);

  /* save local representation of the parallel vector (and scatter) if it exists */
  if (w->localrep) {
    /* local form shares the new vector's array (n + nghost entries) */
    ierr = VecGetArray(*v,&array);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,win->map.n+w->nghost,array,&vw->localrep);CHKERRQ(ierr);
    ierr = PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps));CHKERRQ(ierr);
    ierr = VecRestoreArray(*v,&array);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(*v,vw->localrep);CHKERRQ(ierr);
    /* the scatter context is shared, not copied; bump its reference count */
    vw->localupdate = w->localupdate;
    if (vw->localupdate) {
      ierr = PetscObjectReference((PetscObject)vw->localupdate);CHKERRQ(ierr);
    }
  }

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash = win->stash.donotstash;

  /* carry over composed objects and composed functions */
  ierr = PetscOListDuplicate(win->olist,&(*v)->olist);CHKERRQ(ierr);
  ierr = PetscFListDuplicate(win->qlist,&(*v)->qlist);CHKERRQ(ierr);
  /* local-to-global mappings are shared by reference */
  if (win->mapping) {
    (*v)->mapping = win->mapping;
    ierr = PetscObjectReference((PetscObject)win->mapping);CHKERRQ(ierr);
  }
  if (win->bmapping) {
    (*v)->bmapping = win->bmapping;
    ierr = PetscObjectReference((PetscObject)win->bmapping);CHKERRQ(ierr);
  }
  (*v)->map.bs        = win->map.bs;
  (*v)->bstash.bs = win->bstash.bs;

  PetscFunctionReturn(0);
}
632 
/* ------------------------------------------------------------------------------------------*/
#undef __FUNCT__
#define __FUNCT__ "VecCreateGhostBlockWithArray"
/*@C
   VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space. Indices in the ghost region are based on blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - block size
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
.  nghost - number of local ghost blocks
.  ghosts - global indices of ghost blocks (or PETSC_NULL if not needed)
-  array - the space to store the vector values (as long as n + nghost*bs)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (total local size not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

   Level: advanced

   Concepts: vectors^creating ghosted
   Concepts: vectors^creating with array

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostWithArray(), VecCreateGhostBlocked()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostBlockWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode ierr;
  Vec_MPI        *w;
  PetscScalar    *larray;
  IS             from,to;

  PetscFunctionBegin;
  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation; ghost region holds nghost*bs scalar entries */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,nghost*bs,array);CHKERRQ(ierr);
  ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
  w    = (Vec_MPI *)(*vv)->data;
  /* Create local representation sharing the global vector's array */
  ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,n+bs*nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = VecSetBlockSize(w->localrep,bs);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localrep);CHKERRQ(ierr);
  ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values:
       block indices in ghosts[] map onto the bs*nghost scalar slots
       starting at offset n in the local representation
  */
  ierr = ISCreateBlock(comm,bs,nghost,ghosts,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localupdate);CHKERRQ(ierr);
  ierr = ISDestroy(to);CHKERRQ(ierr);
  ierr = ISDestroy(from);CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
711 
712 #undef __FUNCT__
713 #define __FUNCT__ "VecCreateGhostBlock"
714 /*@
715    VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
716         The indicing of the ghost points is done with blocks.
717 
718    Collective on MPI_Comm
719 
720    Input Parameters:
721 +  comm - the MPI communicator to use
722 .  bs - the block size
723 .  n - local vector length
724 .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
725 .  nghost - number of local ghost blocks
726 -  ghosts - global indices of ghost blocks
727 
728    Output Parameter:
729 .  vv - the global vector representation (without ghost points as part of vector)
730 
731    Notes:
732    Use VecGhostGetLocalForm() to access the local, ghosted representation
733    of the vector.
734 
735    n is the local vector size (total local size not the number of blocks) while nghost
736    is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
737    portion is bs*nghost
738 
739    Level: advanced
740 
741    Concepts: vectors^ghosted
742 
743 .seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
744           VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
745           VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecCreateGhostBlockWithArray()
746 
747 @*/
748 PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostBlock(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
749 {
750   PetscErrorCode ierr;
751 
752   PetscFunctionBegin;
753   ierr = VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
754   PetscFunctionReturn(0);
755 }
756 
757 /*
758     These introduce a ghosted vector where the ghosting is determined by the call to
759   VecSetLocalToGlobalMapping()
760 */
761 
#undef __FUNCT__
#define __FUNCT__ "VecSetLocalToGlobalMapping_FETI"
/*
   VecSetLocalToGlobalMapping_FETI - Installs the local-to-global mapping for a
   FETI vector; the mapping length determines the ghost region, so the storage
   is reallocated to map->n entries and a sequential local representation is
   created over it.

   NOTE(review): the previously allocated array is freed without copying, so any
   values already in the vector are discarded — presumably the mapping is only
   set on freshly created vectors; confirm with callers.
   NOTE(review): an existing v->localrep is overwritten without being destroyed —
   potential leak if this is called more than once; verify usage.
*/
PetscErrorCode VecSetLocalToGlobalMapping_FETI(Vec vv,ISLocalToGlobalMapping map)
{
  PetscErrorCode ierr;
  Vec_MPI        *v = (Vec_MPI *)vv->data;

  PetscFunctionBegin;
  /* ghost count = mapping length minus the owned length */
  v->nghost = map->n - vv->map.n;

  /* we need to make longer the array space that was allocated when the vector was created */
  ierr     = PetscFree(v->array_allocated);CHKERRQ(ierr);
  ierr     = PetscMalloc(map->n*sizeof(PetscScalar),&v->array_allocated);CHKERRQ(ierr);
  v->array = v->array_allocated;

  /* Create local representation (shares the new, longer array) */
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,map->n,v->array,&v->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(vv,v->localrep);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
782 
783 
784 #undef __FUNCT__
785 #define __FUNCT__ "VecSetValuesLocal_FETI"
786 PetscErrorCode VecSetValuesLocal_FETI(Vec vv,PetscInt n,const PetscInt *ix,const PetscScalar *values,InsertMode mode)
787 {
788   PetscErrorCode ierr;
789   Vec_MPI        *v = (Vec_MPI *)vv->data;
790 
791   PetscFunctionBegin;
792   ierr = VecSetValues(v->localrep,n,ix,values,mode);CHKERRQ(ierr);
793   PetscFunctionReturn(0);
794 }
795 
796 EXTERN_C_BEGIN
797 #undef __FUNCT__
798 #define __FUNCT__ "VecCreate_FETI"
799 PetscErrorCode PETSCVEC_DLLEXPORT VecCreate_FETI(Vec vv)
800 {
801   PetscErrorCode ierr;
802 
803   PetscFunctionBegin;
804   ierr = VecSetType(vv,VECMPI);CHKERRQ(ierr);
805 
806   /* overwrite the functions to handle setting values locally */
807   vv->ops->setlocaltoglobalmapping = VecSetLocalToGlobalMapping_FETI;
808   vv->ops->setvalueslocal          = VecSetValuesLocal_FETI;
809   vv->ops->assemblybegin           = 0;
810   vv->ops->assemblyend             = 0;
811   vv->ops->setvaluesblocked        = 0;
812   vv->ops->setvaluesblocked        = 0;
813 
814   PetscFunctionReturn(0);
815 }
816 EXTERN_C_END
817 
818 
819 
820 
821 
822 
823 
824 
825 
826