#define PETSCVEC_DLL
/*
   This file contains routines for Parallel vector operations.
 */
#include "src/vec/vec/impls/mpi/pvecimpl.h"   /*I  "petscvec.h"   I*/

/*
       Note this code is very similar to VecPublish_Seq()
*/
#undef __FUNCT__
#define __FUNCT__ "VecPublish_MPI"
static PetscErrorCode VecPublish_MPI(PetscObject obj)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecDot_MPI"
PetscErrorCode VecDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecDot_Seq(xin,yin,&work);CHKERRQ(ierr);
  ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,PetscSum_Op,xin->comm);CHKERRQ(ierr);
  *z = sum;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecTDot_MPI"
PetscErrorCode VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecTDot_Seq(xin,yin,&work);CHKERRQ(ierr);
  ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,PetscSum_Op,xin->comm);CHKERRQ(ierr);
  *z   = sum;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecSetOption_MPI"
PetscErrorCode VecSetOption_MPI(Vec v,VecOption op)
{
  PetscFunctionBegin;
  if (op == VEC_IGNORE_OFF_PROC_ENTRIES) {
    v->stash.donotstash = PETSC_TRUE;
  } else if (op == VEC_TREAT_OFF_PROC_ENTRIES) {
    v->stash.donotstash = PETSC_FALSE;
  }
  PetscFunctionReturn(0);
}

EXTERN PetscErrorCode VecDuplicate_MPI(Vec,Vec *);
EXTERN_C_BEGIN
EXTERN PetscErrorCode VecView_MPI_Draw(Vec,PetscViewer);
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "VecPlaceArray_MPI"
PetscErrorCode VecPlaceArray_MPI(Vec vin,const PetscScalar *a)
{
  PetscErrorCode ierr;
  Vec_MPI        *v = (Vec_MPI *)vin->data;

  PetscFunctionBegin;
  if (v->unplacedarray) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
  v->unplacedarray = v->array;  /* save previous array so reset can bring it back */
  v->array = (PetscScalar *)a;
  if (v->localrep) {
    ierr = VecPlaceArray(v->localrep,a);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

EXTERN PetscErrorCode VecLoad_Binary(PetscViewer, VecType, Vec*);
EXTERN PetscErrorCode VecGetValues_MPI(Vec,PetscInt,const PetscInt [],PetscScalar []);

static struct _VecOps DvOps = { VecDuplicate_MPI,
            VecDuplicateVecs_Default,
            VecDestroyVecs_Default,
            VecDot_MPI,
            VecMDot_MPI,
            VecNorm_MPI,
            VecTDot_MPI,
            VecMTDot_MPI,
            VecScale_Seq,
            VecCopy_Seq,
            VecSet_Seq,
            VecSwap_Seq,
            VecAXPY_Seq,
            VecAXPBY_Seq,
            VecMAXPY_Seq,
            VecAYPX_Seq,
            VecWAXPY_Seq,
            VecPointwiseMult_Seq,
            VecPointwiseDivide_Seq,
            VecSetValues_MPI,
            VecAssemblyBegin_MPI,
            VecAssemblyEnd_MPI,
            VecGetArray_Seq,
            VecGetSize_MPI,
            VecGetSize_Seq,
            VecRestoreArray_Seq,
            VecMax_MPI,
            VecMin_MPI,
            VecSetRandom_Seq,
            VecSetOption_MPI,
            VecSetValuesBlocked_MPI,
            VecDestroy_MPI,
            VecView_MPI,
            VecPlaceArray_MPI,
            VecReplaceArray_Seq,
            VecDot_Seq,
            VecTDot_Seq,
            VecNorm_Seq,
            VecLoadIntoVector_Default,
            VecReciprocal_Default,
            0, /* VecViewNative... */
            VecConjugate_Seq,
            0,
            0,
            VecResetArray_Seq,
            0,
            VecMaxPointwiseDivide_Seq,
            VecLoad_Binary,
            VecPointwiseMax_Seq,
            VecPointwiseMaxAbs_Seq,
            VecPointwiseMin_Seq,
            VecGetValues_MPI};

#undef __FUNCT__
#define __FUNCT__ "VecCreate_MPI_Private"
/*
    VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
    VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
    VecCreateGhostWithArray(), VecDuplicate_MPI(), and VecDuplicate_Shared()
*/
PetscErrorCode VecCreate_MPI_Private(Vec v,PetscInt nghost,const PetscScalar array[],PetscMap map)
{
  Vec_MPI        *s;
  PetscErrorCode ierr;

  PetscFunctionBegin;

  v->bops->publish   = VecPublish_MPI;
  ierr = PetscLogObjectMemory(v,sizeof(Vec_MPI) + (v->n+nghost+1)*sizeof(PetscScalar));CHKERRQ(ierr);
  ierr           = PetscNew(Vec_MPI,&s);CHKERRQ(ierr);
  ierr           = PetscMemcpy(v->ops,&DvOps,sizeof(DvOps));CHKERRQ(ierr);
  v->data        = (void*)s;
  s->nghost      = nghost;
  v->mapping     = 0;
  v->bmapping    = 0;
  v->petscnative = PETSC_TRUE;

  if (array) {
    s->array           = (PetscScalar *)array;
    s->array_allocated = 0;
  } else {
    PetscInt n         = v->n+nghost;
    ierr               = PetscMalloc(n*sizeof(PetscScalar),&s->array);CHKERRQ(ierr);
    s->array_allocated = s->array;
    ierr               = PetscMemzero(s->array,v->n*sizeof(PetscScalar));CHKERRQ(ierr);
  }

  /* By default parallel vectors do not have local representation */
  s->localrep    = 0;
  s->localupdate = 0;

  v->stash.insertmode  = NOT_SET_VALUES;

  if (!v->map) {
    if (!map) {
      ierr = PetscMapCreateMPI(v->comm,v->n,v->N,&v->map);CHKERRQ(ierr);
    } else {
      v->map = map;
      ierr = PetscObjectReference((PetscObject)map);CHKERRQ(ierr);
    }
  }
  /* create the stashes. The block-size for bstash is set later when
     VecSetValuesBlocked is called.
  */
  ierr = VecStashCreate_Private(v->comm,1,&v->stash);CHKERRQ(ierr);
  ierr = VecStashCreate_Private(v->comm,v->bs,&v->bstash);CHKERRQ(ierr);

#if defined(PETSC_HAVE_MATLAB)
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEnginePut_C","VecMatlabEnginePut_Default",VecMatlabEnginePut_Default);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEngineGet_C","VecMatlabEngineGet_Default",VecMatlabEngineGet_Default);CHKERRQ(ierr);
#endif
  ierr = PetscObjectChangeTypeName((PetscObject)v,VECMPI);CHKERRQ(ierr);
  ierr = PetscPublishAll(v);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*MC
   VECMPI - VECMPI = "mpi" - The basic parallel vector

   Options Database Keys:
. -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions()

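  Example usage (a minimal sketch, not part of the original manual page; the global length 100 is
  arbitrary and error checking with CHKERRQ() is omitted):
.vb
       Vec v;
       VecCreate(PETSC_COMM_WORLD,&v);
       VecSetSizes(v,PETSC_DECIDE,100);
       VecSetType(v,VECMPI);              // or -vec_type mpi together with VecSetFromOptions()
       VecDestroy(v);
.ve
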
  Level: beginner

.seealso: VecCreate(), VecSetType(), VecSetFromOptions(), VecCreateMPIWithArray(), VecType, VecCreateMPI()
M*/

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "VecCreate_MPI"
PetscErrorCode PETSCVEC_DLLEXPORT VecCreate_MPI(Vec vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (vv->bs > 0) {
    ierr = PetscSplitOwnershipBlock(vv->comm,vv->bs,&vv->n,&vv->N);CHKERRQ(ierr);
  } else {
    ierr = PetscSplitOwnership(vv->comm,&vv->n,&vv->N);CHKERRQ(ierr);
  }
  ierr = VecCreate_MPI_Private(vv,0,0,PETSC_NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "VecCreateMPIWithArray"
/*@C
   VecCreateMPIWithArray - Creates a parallel, array-style vector,
   where the user provides the array space to store the vector values.

   Collective on MPI_Comm

   Input Parameters:
+  comm  - the MPI communicator to use
.  n     - local vector length, cannot be PETSC_DECIDE
.  N     - global vector length (or PETSC_DECIDE to have it calculated)
-  array - the user provided array to store the vector values

   Output Parameter:
.  vv - the vector

   Notes:
   Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
   same type as an existing vector.

   If the user-provided array is PETSC_NULL, then VecPlaceArray() can be used
   at a later stage to SET the array for storing the vector values.

   PETSc does NOT free the array when the vector is destroyed via VecDestroy().
   The user should not free the array until the vector is destroyed.

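   Example usage (a minimal sketch, not from the original source; error checking is omitted and the
   local length n is assumed to have been chosen by the caller):
.vb
       PetscScalar *a;
       Vec         v;
       PetscMalloc(n*sizeof(PetscScalar),&a);                        // user owns this storage
       VecCreateMPIWithArray(PETSC_COMM_WORLD,n,PETSC_DECIDE,a,&v);
       VecDestroy(v);                                                // does NOT free a
       PetscFree(a);
.ve
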
   Level: intermediate

   Concepts: vectors^creating with array

.seealso: VecCreateSeqWithArray(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(),
          VecCreateMPI(), VecCreateGhostWithArray(), VecPlaceArray()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecCreateMPIWithArray(MPI_Comm comm,PetscInt n,PetscInt N,const PetscScalar array[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (n == PETSC_DECIDE) {
    SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
  }
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,0,array,PETSC_NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecGhostGetLocalForm"
/*@
    VecGhostGetLocalForm - Obtains the local ghosted representation of
    a parallel vector created with VecCreateGhost().

    Not Collective

    Input Parameter:
.   g - the global vector. The vector must have been obtained with either
        VecCreateGhost(), VecCreateGhostWithArray(), or VecCreateSeq().

    Output Parameter:
.   l - the local (ghosted) representation

    Notes:
    This routine does not actually update the ghost values, but rather it
    returns a sequential vector that includes the locations for the ghost
    values and their current values. The returned vector and the original
    vector passed in share the same array that contains the actual vector data.

    One should call VecGhostRestoreLocalForm() or VecDestroy() once one is
    finished using the object.

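    Example usage (a minimal sketch, not from the original source; x is assumed to be a vector
    created with VecCreateGhost() and error checking is omitted):
.vb
       Vec         lx;
       PetscScalar *a;
       VecGhostGetLocalForm(x,&lx);          // lx shares its array with x
       VecGetArray(lx,&a);
       // a[0] .. a[n-1] hold the owned values, a[n] .. a[n+nghost-1] the ghost values
       VecRestoreArray(lx,&a);
       VecGhostRestoreLocalForm(x,&lx);
.ve
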
    Level: advanced

   Concepts: vectors^ghost point access

.seealso: VecCreateGhost(), VecGhostRestoreLocalForm(), VecCreateGhostWithArray()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecGhostGetLocalForm(Vec g,Vec *l)
{
  PetscErrorCode ierr;
  PetscTruth     isseq,ismpi;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(g,VEC_COOKIE,1);
  PetscValidPointer(l,2);

  ierr = PetscTypeCompare((PetscObject)g,VECSEQ,&isseq);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)g,VECMPI,&ismpi);CHKERRQ(ierr);
  if (ismpi) {
    Vec_MPI *v  = (Vec_MPI*)g->data;
    if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
    *l = v->localrep;
  } else if (isseq) {
    *l = g;
  } else {
    SETERRQ1(PETSC_ERR_ARG_WRONG,"Vector type %s does not have local representation",g->type_name);
  }
  ierr = PetscObjectReference((PetscObject)*l);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecGhostRestoreLocalForm"
/*@
    VecGhostRestoreLocalForm - Restores the local ghosted representation of
    a parallel vector obtained with VecGhostGetLocalForm().

    Not Collective

    Input Parameters:
+   g - the global vector
-   l - the local (ghosted) representation

    Notes:
    This routine does not actually update the ghost values; it should be called
    once access to the local form obtained with VecGhostGetLocalForm() is no
    longer needed.

    Level: advanced

.seealso: VecCreateGhost(), VecGhostGetLocalForm(), VecCreateGhostWithArray()
@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecGhostRestoreLocalForm(Vec g,Vec *l)
{
  PetscFunctionBegin;
  PetscObjectDereference((PetscObject)*l);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecGhostUpdateBegin"
/*@
   VecGhostUpdateBegin - Begins the vector scatter to update the vector from
   the local representation to the global representation, or vice versa.

   Collective on Vec

   Input Parameters:
+  g - the vector (obtained with VecCreateGhost() or VecDuplicate())
.  insertmode - one of ADD_VALUES or INSERT_VALUES
-  scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE

   Notes:
   Use the following to update the ghost regions with correct values from the owning process
.vb
       VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
       VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
.ve

   Use the following to accumulate the ghost region values onto the owning processors
.vb
       VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
.ve

   To accumulate the ghost region values onto the owning processors and then update
   the ghost regions correctly, call the latter followed by the former, i.e.,
.vb
       VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
       VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
.ve

   Level: advanced

.seealso: VecCreateGhost(), VecGhostUpdateEnd(), VecGhostGetLocalForm(),
          VecGhostRestoreLocalForm(),VecCreateGhostWithArray()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecGhostUpdateBegin(Vec g,InsertMode insertmode,ScatterMode scattermode)
{
  Vec_MPI        *v;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(g,VEC_COOKIE,1);

  v  = (Vec_MPI*)g->data;
  if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
  if (!v->localupdate) PetscFunctionReturn(0);

  if (scattermode == SCATTER_REVERSE) {
    ierr = VecScatterBegin(v->localrep,g,insertmode,scattermode,v->localupdate);CHKERRQ(ierr);
  } else {
    ierr = VecScatterBegin(g,v->localrep,insertmode,scattermode,v->localupdate);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecGhostUpdateEnd"
/*@
   VecGhostUpdateEnd - Ends the vector scatter to update the vector from
   the local representation to the global representation, or vice versa.

   Collective on Vec

   Input Parameters:
+  g - the vector (obtained with VecCreateGhost() or VecDuplicate())
.  insertmode - one of ADD_VALUES or INSERT_VALUES
-  scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE

   Notes:

   Use the following to update the ghost regions with correct values from the owning process
.vb
       VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
       VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
.ve

   Use the following to accumulate the ghost region values onto the owning processors
.vb
       VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
.ve

   To accumulate the ghost region values onto the owning processors and then update
   the ghost regions correctly, call the latter followed by the former, i.e.,
.vb
       VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
       VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
.ve

   Level: advanced

.seealso: VecCreateGhost(), VecGhostUpdateBegin(), VecGhostGetLocalForm(),
          VecGhostRestoreLocalForm(),VecCreateGhostWithArray()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecGhostUpdateEnd(Vec g,InsertMode insertmode,ScatterMode scattermode)
{
  Vec_MPI        *v;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(g,VEC_COOKIE,1);

  v  = (Vec_MPI*)g->data;
  if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
  if (!v->localupdate) PetscFunctionReturn(0);

  if (scattermode == SCATTER_REVERSE) {
    ierr = VecScatterEnd(v->localrep,g,insertmode,scattermode,v->localupdate);CHKERRQ(ierr);
  } else {
    ierr = VecScatterEnd(g,v->localrep,insertmode,scattermode,v->localupdate);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecCreateGhostWithArray"
/*@C
   VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost points
.  ghosts - global indices of ghost points (or PETSC_NULL if not needed)
-  array - the space to store the vector values (of length at least n + nghost)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

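   Example usage (a minimal sketch, not from the original source; n, nghost and ghosts are assumed
   to have been set by the caller and error checking is omitted):
.vb
       PetscScalar *a;
       Vec         x;
       PetscMalloc((n+nghost)*sizeof(PetscScalar),&a);     // room for owned plus ghost values
       VecCreateGhostWithArray(PETSC_COMM_WORLD,n,PETSC_DECIDE,nghost,ghosts,a,&x);
.ve
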
   Level: advanced

   Concepts: vectors^creating with array
   Concepts: vectors^ghosted

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostWithArray(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode ierr;
  Vec_MPI        *w;
  PetscScalar    *larray;
  IS             from,to;

  PetscFunctionBegin;
  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,nghost,array,PETSC_NULL);CHKERRQ(ierr);
  w    = (Vec_MPI *)(*vv)->data;
  /* Create local representation */
  ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localrep);CHKERRQ(ierr);
  ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  ierr = ISCreateGeneral(comm,nghost,ghosts,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localupdate);CHKERRQ(ierr);
  ierr = ISDestroy(to);CHKERRQ(ierr);
  ierr = ISDestroy(from);CHKERRQ(ierr);

  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecCreateGhost"
/*@
   VecCreateGhost - Creates a parallel vector with ghost padding on each processor.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

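   Example usage (a minimal sketch, not from the original source; the local length n is assumed to
   have been set by the caller, the two ghost indices are arbitrary, and error checking is omitted):
.vb
       PetscInt ghosts[2] = {0,42};       // global indices owned by other processes
       Vec      x;
       VecCreateGhost(PETSC_COMM_WORLD,n,PETSC_DECIDE,2,ghosts,&x);
       VecGhostUpdateBegin(x,INSERT_VALUES,SCATTER_FORWARD);
       VecGhostUpdateEnd(x,INSERT_VALUES,SCATTER_FORWARD);
.ve
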
   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhost(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreateGhostWithArray(comm,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecDuplicate_MPI"
PetscErrorCode VecDuplicate_MPI(Vec win,Vec *v)
{
  PetscErrorCode ierr;
  Vec_MPI        *vw,*w = (Vec_MPI *)win->data;
  PetscScalar    *array;

  PetscFunctionBegin;
  ierr = VecCreate(win->comm,v);CHKERRQ(ierr);
  ierr = VecSetSizes(*v,win->n,win->N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*v,w->nghost,0,win->map);CHKERRQ(ierr);
  vw   = (Vec_MPI *)(*v)->data;
  ierr = PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps));CHKERRQ(ierr);

  /* save local representation of the parallel vector (and scatter) if it exists */
  if (w->localrep) {
    ierr = VecGetArray(*v,&array);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,win->n+w->nghost,array,&vw->localrep);CHKERRQ(ierr);
    ierr = PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps));CHKERRQ(ierr);
    ierr = VecRestoreArray(*v,&array);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(*v,vw->localrep);CHKERRQ(ierr);
    vw->localupdate = w->localupdate;
    if (vw->localupdate) {
      ierr = PetscObjectReference((PetscObject)vw->localupdate);CHKERRQ(ierr);
    }
  }

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash = win->stash.donotstash;

  ierr = PetscOListDuplicate(win->olist,&(*v)->olist);CHKERRQ(ierr);
  ierr = PetscFListDuplicate(win->qlist,&(*v)->qlist);CHKERRQ(ierr);
  if (win->mapping) {
    (*v)->mapping = win->mapping;
    ierr = PetscObjectReference((PetscObject)win->mapping);CHKERRQ(ierr);
  }
  if (win->bmapping) {
    (*v)->bmapping = win->bmapping;
    ierr = PetscObjectReference((PetscObject)win->bmapping);CHKERRQ(ierr);
  }
  (*v)->bs        = win->bs;
  (*v)->bstash.bs = win->bstash.bs;

  PetscFunctionReturn(0);
}

/* ------------------------------------------------------------------------------------------*/
#undef __FUNCT__
#define __FUNCT__ "VecCreateGhostBlockWithArray"
/*@C
   VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space. Indices in the ghost region are based on blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - block size
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost blocks
.  ghosts - global indices of ghost blocks (or PETSC_NULL if not needed)
-  array - the space to store the vector values (of length at least n + nghost*bs)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (total local size not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

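   Example usage (a minimal sketch, not from the original source; bs, n, nghost and ghosts are
   assumed to have been set by the caller and error checking is omitted):
.vb
       PetscScalar *a;
       Vec         x;
       PetscMalloc((n+bs*nghost)*sizeof(PetscScalar),&a);  // n owned entries plus bs*nghost ghost entries
       VecCreateGhostBlockWithArray(PETSC_COMM_WORLD,bs,n,PETSC_DECIDE,nghost,ghosts,a,&x);
.ve
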
   Level: advanced

   Concepts: vectors^creating ghosted
   Concepts: vectors^creating with array

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostWithArray(), VecCreateGhostBlocked()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostBlockWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode ierr;
  Vec_MPI        *w;
  PetscScalar    *larray;
  IS             from,to;

  PetscFunctionBegin;
  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,nghost*bs,array,PETSC_NULL);CHKERRQ(ierr);
  ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
  w    = (Vec_MPI *)(*vv)->data;
  /* Create local representation */
  ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,n+bs*nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = VecSetBlockSize(w->localrep,bs);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localrep);CHKERRQ(ierr);
  ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  ierr = ISCreateBlock(comm,bs,nghost,ghosts,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localupdate);CHKERRQ(ierr);
  ierr = ISDestroy(to);CHKERRQ(ierr);
  ierr = ISDestroy(from);CHKERRQ(ierr);

  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecCreateGhostBlock"
/*@
   VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
        The indexing of the ghost points is done with blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - the block size
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost blocks
-  ghosts - global indices of ghost blocks

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (total local size not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

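   Example usage (a minimal sketch, not from the original source; with bs = 2 and nghost = 3 ghost
   blocks the ghost portion holds bs*nghost = 6 entries, n is assumed to be a multiple of bs, and
   error checking is omitted):
.vb
       PetscInt ghosts[3] = {0,5,9};      // global block indices owned by other processes
       Vec      x;
       VecCreateGhostBlock(PETSC_COMM_WORLD,2,n,PETSC_DECIDE,3,ghosts,&x);
.ve
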
   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostBlock(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*
    These introduce a ghosted vector where the ghosting is determined by the call to
  VecSetLocalToGlobalMapping()
*/

#undef __FUNCT__
#define __FUNCT__ "VecSetLocalToGlobalMapping_FETI"
PetscErrorCode VecSetLocalToGlobalMapping_FETI(Vec vv,ISLocalToGlobalMapping map)
{
  PetscErrorCode ierr;
  Vec_MPI        *v = (Vec_MPI *)vv->data;

  PetscFunctionBegin;
  v->nghost = map->n - vv->n;

  /* we need to enlarge the array space that was allocated when the vector was created */
  ierr     = PetscFree(v->array_allocated);CHKERRQ(ierr);
  ierr     = PetscMalloc(map->n*sizeof(PetscScalar),&v->array_allocated);CHKERRQ(ierr);
  v->array = v->array_allocated;

  /* Create local representation */
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,map->n,v->array,&v->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(vv,v->localrep);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "VecSetValuesLocal_FETI"
PetscErrorCode VecSetValuesLocal_FETI(Vec vv,PetscInt n,const PetscInt *ix,const PetscScalar *values,InsertMode mode)
{
  PetscErrorCode ierr;
  Vec_MPI        *v = (Vec_MPI *)vv->data;

  PetscFunctionBegin;
  ierr = VecSetValues(v->localrep,n,ix,values,mode);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "VecCreate_FETI"
PetscErrorCode PETSCVEC_DLLEXPORT VecCreate_FETI(Vec vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecSetType(vv,VECMPI);CHKERRQ(ierr);

  /* overwrite the functions to handle setting values locally */
  vv->ops->setlocaltoglobalmapping = VecSetLocalToGlobalMapping_FETI;
  vv->ops->setvalueslocal          = VecSetValuesLocal_FETI;
  vv->ops->assemblybegin           = 0;
  vv->ops->assemblyend             = 0;
  vv->ops->setvaluesblocked        = 0;

  PetscFunctionReturn(0);
}
EXTERN_C_END