xref: /petsc/src/vec/vec/impls/mpi/pbvec.c (revision a313700dd719903755ca0f3f2e1c5ee7e6be3382)
1 #define PETSCVEC_DLL
2 /*
3    This file contains routines for Parallel vector operations.
4  */
5 #include "src/vec/vec/impls/mpi/pvecimpl.h"   /*I  "petscvec.h"   I*/
6 
7 #if 0
8 #undef __FUNCT__
9 #define __FUNCT__ "VecPublish_MPI"
/* Stub publish routine; the enclosing #if 0 compiles this block out, so it
   is never built or registered.  Retained only for reference. */
static PetscErrorCode VecPublish_MPI(PetscObject obj)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
15 #endif
16 
17 #undef __FUNCT__
18 #define __FUNCT__ "VecDot_MPI"
19 PetscErrorCode VecDot_MPI(Vec xin,Vec yin,PetscScalar *z)
20 {
21   PetscScalar    sum,work;
22   PetscErrorCode ierr;
23 
24   PetscFunctionBegin;
25   ierr = VecDot_Seq(xin,yin,&work);CHKERRQ(ierr);
26   ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,PetscSum_Op,((PetscObject)xin)->comm);CHKERRQ(ierr);
27   *z = sum;
28   PetscFunctionReturn(0);
29 }
30 
31 #undef __FUNCT__
32 #define __FUNCT__ "VecTDot_MPI"
33 PetscErrorCode VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z)
34 {
35   PetscScalar    sum,work;
36   PetscErrorCode ierr;
37 
38   PetscFunctionBegin;
39   ierr = VecTDot_Seq(xin,yin,&work);CHKERRQ(ierr);
40   ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,PetscSum_Op,((PetscObject)xin)->comm);CHKERRQ(ierr);
41   *z   = sum;
42   PetscFunctionReturn(0);
43 }
44 
45 #undef __FUNCT__
46 #define __FUNCT__ "VecSetOption_MPI"
47 PetscErrorCode VecSetOption_MPI(Vec v,VecOption op,PetscTruth flag)
48 {
49   PetscFunctionBegin;
50   if (op == VEC_IGNORE_OFF_PROC_ENTRIES) {
51     v->stash.donotstash = flag;
52   } else if (op == VEC_IGNORE_NEGATIVE_INDICES) {
53     v->stash.ignorenegidx = flag;
54   }
55   PetscFunctionReturn(0);
56 }
57 
58 EXTERN PetscErrorCode VecDuplicate_MPI(Vec,Vec *);
59 EXTERN_C_BEGIN
60 EXTERN PetscErrorCode VecView_MPI_Draw(Vec,PetscViewer);
61 EXTERN_C_END
62 
63 #undef __FUNCT__
64 #define __FUNCT__ "VecPlaceArray_MPI"
65 PetscErrorCode VecPlaceArray_MPI(Vec vin,const PetscScalar *a)
66 {
67   PetscErrorCode ierr;
68   Vec_MPI        *v = (Vec_MPI *)vin->data;
69 
70   PetscFunctionBegin;
71   if (v->unplacedarray) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
72   v->unplacedarray = v->array;  /* save previous array so reset can bring it back */
73   v->array = (PetscScalar *)a;
74   if (v->localrep) {
75     ierr = VecPlaceArray(v->localrep,a);CHKERRQ(ierr);
76   }
77   PetscFunctionReturn(0);
78 }
79 
80 EXTERN PetscErrorCode VecLoad_Binary(PetscViewer, const VecType, Vec*);
81 EXTERN PetscErrorCode VecGetValues_MPI(Vec,PetscInt,const PetscInt [],PetscScalar []);
82 
/*
   Function table for the "mpi" vector type.  Operations that require
   communication (dot products, norms, max/min, assembly, view) use the
   _MPI variants; purely local operations reuse the _Seq kernels, since
   each process works on its contiguous local array.
   NOTE(review): the entries are positional in struct _VecOps -- the order
   must match the struct declaration in the Vec implementation header.
*/
static struct _VecOps DvOps = { VecDuplicate_MPI, /* 1 */
            VecDuplicateVecs_Default,
            VecDestroyVecs_Default,
            VecDot_MPI,
            VecMDot_MPI,
            VecNorm_MPI,
            VecTDot_MPI,
            VecMTDot_MPI,
            VecScale_Seq,
            VecCopy_Seq, /* 10 */
            VecSet_Seq,
            VecSwap_Seq,
            VecAXPY_Seq,
            VecAXPBY_Seq,
            VecMAXPY_Seq,
            VecAYPX_Seq,
            VecWAXPY_Seq,
            VecAXPBYPCZ_Seq,
            VecPointwiseMult_Seq,
            VecPointwiseDivide_Seq,
            VecSetValues_MPI, /* 20 */
            VecAssemblyBegin_MPI,
            VecAssemblyEnd_MPI,
            VecGetArray_Seq,
            VecGetSize_MPI,
            VecGetSize_Seq,
            VecRestoreArray_Seq,
            VecMax_MPI,
            VecMin_MPI,
            VecSetRandom_Seq,
            VecSetOption_MPI,
            VecSetValuesBlocked_MPI,
            VecDestroy_MPI,
            VecView_MPI,
            VecPlaceArray_MPI,
            VecReplaceArray_Seq,
            VecDot_Seq,
            VecTDot_Seq,
            VecNorm_Seq,
            VecMDot_Seq,
            VecMTDot_Seq,
            VecLoadIntoVector_Default,
            VecReciprocal_Default,
            0, /* VecViewNative... */
            VecConjugate_Seq,
            0,
            0,
            VecResetArray_Seq,
            0,
            VecMaxPointwiseDivide_Seq,
            VecLoad_Binary,
            VecPointwiseMax_Seq,
            VecPointwiseMaxAbs_Seq,
            VecPointwiseMin_Seq,
            VecGetValues_MPI};
138 
139 #undef __FUNCT__
140 #define __FUNCT__ "VecCreate_MPI_Private"
141 /*
142     VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
143     VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
    VecDuplicate_MPI(), VecCreateGhostWithArray(), and VecDuplicate_Shared()
145 
146     If alloc is true and array is PETSC_NULL then this routine allocates the space, otherwise
147     no space is allocated.
148 */
PetscErrorCode VecCreate_MPI_Private(Vec v,PetscTruth alloc,PetscInt nghost,const PetscScalar array[])
{
  Vec_MPI        *s;
  PetscErrorCode ierr;

  PetscFunctionBegin;

  ierr           = PetscNewLog(v,Vec_MPI,&s);CHKERRQ(ierr);
  v->data        = (void*)s;
  /* install the full MPI function table; specialized creators may overwrite entries afterwards */
  ierr           = PetscMemcpy(v->ops,&DvOps,sizeof(DvOps));CHKERRQ(ierr);
  s->nghost      = nghost;
  v->mapping     = 0;
  v->bmapping    = 0;
  v->petscnative = PETSC_TRUE;

  /* default the block size to 1 before the parallel layout is finalized */
  if (v->map.bs == -1) v->map.bs = 1;
  ierr = PetscMapSetUp(&v->map);CHKERRQ(ierr);
  s->array           = (PetscScalar *)array;
  s->array_allocated = 0;
  if (alloc) {
    /* allocate owned entries plus ghost padding at the end of the local array */
    PetscInt n         = v->map.n+nghost;
    ierr               = PetscMalloc(n*sizeof(PetscScalar),&s->array);CHKERRQ(ierr);
    ierr               = PetscLogObjectMemory(v,n*sizeof(PetscScalar));CHKERRQ(ierr);
    /* only the owned portion is zeroed; ghost slots are left uninitialized */
    ierr               = PetscMemzero(s->array,v->map.n*sizeof(PetscScalar));CHKERRQ(ierr);
    /* remember the allocation so destroy can free it (user-supplied arrays are never freed) */
    s->array_allocated = s->array;
  }

  /* By default parallel vectors do not have local representation */
  s->localrep    = 0;
  s->localupdate = 0;

  v->stash.insertmode  = NOT_SET_VALUES;
  /* create the stashes. The block-size for bstash is set later when
     VecSetValuesBlocked is called.
  */
  ierr = VecStashCreate_Private(((PetscObject)v)->comm,1,&v->stash);CHKERRQ(ierr);
  ierr = VecStashCreate_Private(((PetscObject)v)->comm,v->map.bs,&v->bstash);CHKERRQ(ierr);

#if defined(PETSC_HAVE_MATLAB_ENGINE)
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEnginePut_C","VecMatlabEnginePut_Default",VecMatlabEnginePut_Default);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEngineGet_C","VecMatlabEngineGet_Default",VecMatlabEngineGet_Default);CHKERRQ(ierr);
#endif
  ierr = PetscObjectChangeTypeName((PetscObject)v,VECMPI);CHKERRQ(ierr);
  ierr = PetscPublishAll(v);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
195 
196 /*MC
197    VECMPI - VECMPI = "mpi" - The basic parallel vector
198 
199    Options Database Keys:
200 . -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions()
201 
202   Level: beginner
203 
.seealso: VecCreate(), VecSetType(), VecSetFromOptions(), VecCreateMPIWithArray(), VECMPI, VecType, VecCreateMPI()
205 M*/
206 
207 EXTERN_C_BEGIN
208 #undef __FUNCT__
209 #define __FUNCT__ "VecCreate_MPI"
/* Registered constructor for the VECMPI type: allocates zeroed local
   storage (alloc = PETSC_TRUE) with no ghost padding. */
PetscErrorCode PETSCVEC_DLLEXPORT VecCreate_MPI(Vec vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
218 EXTERN_C_END
219 
220 #undef __FUNCT__
221 #define __FUNCT__ "VecCreateMPIWithArray"
222 /*@C
223    VecCreateMPIWithArray - Creates a parallel, array-style vector,
224    where the user provides the array space to store the vector values.
225 
226    Collective on MPI_Comm
227 
228    Input Parameters:
229 +  comm  - the MPI communicator to use
230 .  n     - local vector length, cannot be PETSC_DECIDE
231 .  N     - global vector length (or PETSC_DECIDE to have calculated)
232 -  array - the user provided array to store the vector values
233 
234    Output Parameter:
235 .  vv - the vector
236 
237    Notes:
238    Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
239    same type as an existing vector.
240 
241    If the user-provided array is PETSC_NULL, then VecPlaceArray() can be used
242    at a later stage to SET the array for storing the vector values.
243 
244    PETSc does NOT free the array when the vector is destroyed via VecDestroy().
245    The user should not free the array until the vector is destroyed.
246 
247    Level: intermediate
248 
249    Concepts: vectors^creating with array
250 
251 .seealso: VecCreateSeqWithArray(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(),
252           VecCreateMPI(), VecCreateGhostWithArray(), VecPlaceArray()
253 
254 @*/
255 PetscErrorCode PETSCVEC_DLLEXPORT VecCreateMPIWithArray(MPI_Comm comm,PetscInt n,PetscInt N,const PetscScalar array[],Vec *vv)
256 {
257   PetscErrorCode ierr;
258 
259   PetscFunctionBegin;
260   if (n == PETSC_DECIDE) {
261     SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
262   }
263   ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
264   ierr = VecCreate(comm,vv);CHKERRQ(ierr);
265   ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
266   ierr = VecCreate_MPI_Private(*vv,PETSC_FALSE,0,array);CHKERRQ(ierr);
267   PetscFunctionReturn(0);
268 }
269 
270 #undef __FUNCT__
271 #define __FUNCT__ "VecGhostGetLocalForm"
272 /*@
273     VecGhostGetLocalForm - Obtains the local ghosted representation of
274     a parallel vector created with VecCreateGhost().
275 
276     Not Collective
277 
278     Input Parameter:
.   g - the global vector. Vector must have been obtained with either
280         VecCreateGhost(), VecCreateGhostWithArray() or VecCreateSeq().
281 
282     Output Parameter:
283 .   l - the local (ghosted) representation
284 
285     Notes:
286     This routine does not actually update the ghost values, but rather it
287     returns a sequential vector that includes the locations for the ghost
288     values and their current values. The returned vector and the original
289     vector passed in share the same array that contains the actual vector data.
290 
291     One should call VecGhostRestoreLocalForm() or VecDestroy() once one is
292     finished using the object.
293 
294     Level: advanced
295 
296    Concepts: vectors^ghost point access
297 
298 .seealso: VecCreateGhost(), VecGhostRestoreLocalForm(), VecCreateGhostWithArray()
299 
300 @*/
301 PetscErrorCode PETSCVEC_DLLEXPORT VecGhostGetLocalForm(Vec g,Vec *l)
302 {
303   PetscErrorCode ierr;
304   PetscTruth     isseq,ismpi;
305 
306   PetscFunctionBegin;
307   PetscValidHeaderSpecific(g,VEC_COOKIE,1);
308   PetscValidPointer(l,2);
309 
310   ierr = PetscTypeCompare((PetscObject)g,VECSEQ,&isseq);CHKERRQ(ierr);
311   ierr = PetscTypeCompare((PetscObject)g,VECMPI,&ismpi);CHKERRQ(ierr);
312   if (ismpi) {
313     Vec_MPI *v  = (Vec_MPI*)g->data;
314     if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
315     *l = v->localrep;
316   } else if (isseq) {
317     *l = g;
318   } else {
319     SETERRQ1(PETSC_ERR_ARG_WRONG,"Vector type %s does not have local representation",((PetscObject)g)->type_name);
320   }
321   ierr = PetscObjectReference((PetscObject)*l);CHKERRQ(ierr);
322   PetscFunctionReturn(0);
323 }
324 
325 #undef __FUNCT__
326 #define __FUNCT__ "VecGhostRestoreLocalForm"
327 /*@
328     VecGhostRestoreLocalForm - Restores the local ghosted representation of
329     a parallel vector obtained with VecGhostGetLocalForm().
330 
331     Not Collective
332 
333     Input Parameter:
334 +   g - the global vector
335 -   l - the local (ghosted) representation
336 
337     Notes:
338     This routine does not actually update the ghost values, but rather it
339     returns a sequential vector that includes the locations for the ghost values
340     and their current values.
341 
342     Level: advanced
343 
344 .seealso: VecCreateGhost(), VecGhostGetLocalForm(), VecCreateGhostWithArray()
345 @*/
346 PetscErrorCode PETSCVEC_DLLEXPORT VecGhostRestoreLocalForm(Vec g,Vec *l)
347 {
348   PetscFunctionBegin;
349   PetscObjectDereference((PetscObject)*l);
350   PetscFunctionReturn(0);
351 }
352 
353 #undef __FUNCT__
354 #define __FUNCT__ "VecGhostUpdateBegin"
355 /*@
356    VecGhostUpdateBegin - Begins the vector scatter to update the vector from
357    local representation to global or global representation to local.
358 
359    Collective on Vec
360 
361    Input Parameters:
362 +  g - the vector (obtained with VecCreateGhost() or VecDuplicate())
363 .  insertmode - one of ADD_VALUES or INSERT_VALUES
364 -  scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE
365 
366    Notes:
367    Use the following to update the ghost regions with correct values from the owning process
368 .vb
369        VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
370        VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
371 .ve
372 
373    Use the following to accumulate the ghost region values onto the owning processors
374 .vb
375        VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
376        VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
377 .ve
378 
379    To accumulate the ghost region values onto the owning processors and then update
   the ghost regions correctly, call the latter followed by the former, i.e.,
381 .vb
382        VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
383        VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
384        VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
385        VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
386 .ve
387 
388    Level: advanced
389 
390 .seealso: VecCreateGhost(), VecGhostUpdateEnd(), VecGhostGetLocalForm(),
391           VecGhostRestoreLocalForm(),VecCreateGhostWithArray()
392 
393 @*/
394 PetscErrorCode PETSCVEC_DLLEXPORT VecGhostUpdateBegin(Vec g,InsertMode insertmode,ScatterMode scattermode)
395 {
396   Vec_MPI        *v;
397   PetscErrorCode ierr;
398 
399   PetscFunctionBegin;
400   PetscValidHeaderSpecific(g,VEC_COOKIE,1);
401 
402   v  = (Vec_MPI*)g->data;
403   if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
404   if (!v->localupdate) PetscFunctionReturn(0);
405 
406   if (scattermode == SCATTER_REVERSE) {
407     ierr = VecScatterBegin(v->localupdate,v->localrep,g,insertmode,scattermode);CHKERRQ(ierr);
408   } else {
409     ierr = VecScatterBegin(v->localupdate,g,v->localrep,insertmode,scattermode);CHKERRQ(ierr);
410   }
411   PetscFunctionReturn(0);
412 }
413 
414 #undef __FUNCT__
415 #define __FUNCT__ "VecGhostUpdateEnd"
416 /*@
417    VecGhostUpdateEnd - End the vector scatter to update the vector from
418    local representation to global or global representation to local.
419 
420    Collective on Vec
421 
422    Input Parameters:
423 +  g - the vector (obtained with VecCreateGhost() or VecDuplicate())
424 .  insertmode - one of ADD_VALUES or INSERT_VALUES
425 -  scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE
426 
427    Notes:
428 
429    Use the following to update the ghost regions with correct values from the owning process
430 .vb
431        VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
432        VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
433 .ve
434 
435    Use the following to accumulate the ghost region values onto the owning processors
436 .vb
437        VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
438        VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
439 .ve
440 
441    To accumulate the ghost region values onto the owning processors and then update
   the ghost regions correctly, call the latter followed by the former, i.e.,
443 .vb
444        VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
445        VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
446        VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
447        VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
448 .ve
449 
450    Level: advanced
451 
452 .seealso: VecCreateGhost(), VecGhostUpdateBegin(), VecGhostGetLocalForm(),
453           VecGhostRestoreLocalForm(),VecCreateGhostWithArray()
454 
455 @*/
456 PetscErrorCode PETSCVEC_DLLEXPORT VecGhostUpdateEnd(Vec g,InsertMode insertmode,ScatterMode scattermode)
457 {
458   Vec_MPI        *v;
459   PetscErrorCode ierr;
460 
461   PetscFunctionBegin;
462   PetscValidHeaderSpecific(g,VEC_COOKIE,1);
463 
464   v  = (Vec_MPI*)g->data;
465   if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
466   if (!v->localupdate) PetscFunctionReturn(0);
467 
468   if (scattermode == SCATTER_REVERSE) {
469     ierr = VecScatterEnd(v->localupdate,v->localrep,g,insertmode,scattermode);CHKERRQ(ierr);
470   } else {
471     ierr = VecScatterEnd(v->localupdate,g,v->localrep,insertmode,scattermode);CHKERRQ(ierr);
472   }
473   PetscFunctionReturn(0);
474 }
475 
476 #undef __FUNCT__
477 #define __FUNCT__ "VecCreateGhostWithArray"
478 /*@C
479    VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
480    the caller allocates the array space.
481 
482    Collective on MPI_Comm
483 
484    Input Parameters:
485 +  comm - the MPI communicator to use
486 .  n - local vector length
487 .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
488 .  nghost - number of local ghost points
489 .  ghosts - global indices of ghost points (or PETSC_NULL if not needed)
490 -  array - the space to store the vector values (as long as n + nghost)
491 
492    Output Parameter:
493 .  vv - the global vector representation (without ghost points as part of vector)
494 
495    Notes:
496    Use VecGhostGetLocalForm() to access the local, ghosted representation
497    of the vector.
498 
499    This also automatically sets the ISLocalToGlobalMapping() for this vector.
500 
501    Level: advanced
502 
503    Concepts: vectors^creating with array
504    Concepts: vectors^ghosted
505 
506 .seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
507           VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
508           VecCreateGhostBlock(), VecCreateGhostBlockWithArray()
509 
510 @*/
511 PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostWithArray(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
512 {
513   PetscErrorCode         ierr;
514   Vec_MPI                *w;
515   PetscScalar            *larray;
516   IS                     from,to;
517   ISLocalToGlobalMapping ltog;
518   PetscInt               rstart,i,*indices;
519 
520   PetscFunctionBegin;
521   *vv = 0;
522 
523   if (n == PETSC_DECIDE)      SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
524   if (nghost == PETSC_DECIDE) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
525   if (nghost < 0)             SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
526   ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
527   /* Create global representation */
528   ierr = VecCreate(comm,vv);CHKERRQ(ierr);
529   ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
530   ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost,array);CHKERRQ(ierr);
531   w    = (Vec_MPI *)(*vv)->data;
532   /* Create local representation */
533   ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
534   ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
535   ierr = PetscLogObjectParent(*vv,w->localrep);CHKERRQ(ierr);
536   ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);
537 
538   /*
539        Create scatter context for scattering (updating) ghost values
540   */
541   ierr = ISCreateGeneral(comm,nghost,ghosts,&from);CHKERRQ(ierr);
542   ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
543   ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
544   ierr = PetscLogObjectParent(*vv,w->localupdate);CHKERRQ(ierr);
545   ierr = ISDestroy(to);CHKERRQ(ierr);
546   ierr = ISDestroy(from);CHKERRQ(ierr);
547 
548   /* set local to global mapping for ghosted vector */
549   ierr = PetscMalloc((n+nghost)*sizeof(PetscInt),&indices);CHKERRQ(ierr);
550   ierr = VecGetOwnershipRange(*vv,&rstart,PETSC_NULL);CHKERRQ(ierr);
551   for (i=0; i<n; i++) {
552     indices[i] = rstart + i;
553   }
554   for (i=0; i<nghost; i++) {
555     indices[n+i] = ghosts[i];
556   }
557   ierr = ISLocalToGlobalMappingCreate(comm,n+nghost,indices,&ltog);CHKERRQ(ierr);
558   ierr = PetscFree(indices);CHKERRQ(ierr);
559   ierr = VecSetLocalToGlobalMapping(*vv,ltog);CHKERRQ(ierr);
560   ierr = ISLocalToGlobalMappingDestroy(ltog);CHKERRQ(ierr);
561   ierr = PetscFree(indices);CHKERRQ(ierr);
562   PetscFunctionReturn(0);
563 }
564 
565 #undef __FUNCT__
566 #define __FUNCT__ "VecCreateGhost"
567 /*@
568    VecCreateGhost - Creates a parallel vector with ghost padding on each processor.
569 
570    Collective on MPI_Comm
571 
572    Input Parameters:
573 +  comm - the MPI communicator to use
574 .  n - local vector length
575 .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
576 .  nghost - number of local ghost points
577 -  ghosts - global indices of ghost points
578 
579    Output Parameter:
580 .  vv - the global vector representation (without ghost points as part of vector)
581 
582    Notes:
583    Use VecGhostGetLocalForm() to access the local, ghosted representation
584    of the vector.
585 
586    This also automatically sets the ISLocalToGlobalMapping() for this vector.
587 
588    Level: advanced
589 
590    Concepts: vectors^ghosted
591 
592 .seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
593           VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
594           VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
595           VecCreateGhostBlock(), VecCreateGhostBlockWithArray()
596 
597 @*/
PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhost(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* array = 0 makes VecCreateGhostWithArray() allocate the storage itself */
  ierr = VecCreateGhostWithArray(comm,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
606 
607 #undef __FUNCT__
608 #define __FUNCT__ "VecDuplicate_MPI"
/* Duplicate a parallel vector: creates a new vector with the same layout,
   ghosting, function table, composed lists and mappings as win, but with
   freshly allocated (zeroed) storage; values are NOT copied. */
PetscErrorCode VecDuplicate_MPI(Vec win,Vec *v)
{
  PetscErrorCode ierr;
  Vec_MPI        *vw,*w = (Vec_MPI *)win->data;
  PetscScalar    *array;

  PetscFunctionBegin;
  ierr = VecCreate(((PetscObject)win)->comm,v);CHKERRQ(ierr);
  ierr = VecSetSizes(*v,win->map.n,win->map.N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*v,PETSC_TRUE,w->nghost,0);CHKERRQ(ierr);
  vw   = (Vec_MPI *)(*v)->data;
  /* copy the parent's ops table in case it was customized after creation */
  ierr = PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps));CHKERRQ(ierr);

  /* save local representation of the parallel vector (and scatter) if it exists */
  if (w->localrep) {
    ierr = VecGetArray(*v,&array);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,win->map.n+w->nghost,array,&vw->localrep);CHKERRQ(ierr);
    ierr = PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps));CHKERRQ(ierr);
    ierr = VecRestoreArray(*v,&array);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(*v,vw->localrep);CHKERRQ(ierr);
    /* the scatter context is shared (reference counted), not duplicated */
    vw->localupdate = w->localupdate;
    if (vw->localupdate) {
      ierr = PetscObjectReference((PetscObject)vw->localupdate);CHKERRQ(ierr);
    }
  }

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash = win->stash.donotstash;
  (*v)->stash.ignorenegidx = win->stash.ignorenegidx;

  /* carry over composed objects and composed functions */
  ierr = PetscOListDuplicate(((PetscObject)win)->olist,&((PetscObject)(*v))->olist);CHKERRQ(ierr);
  ierr = PetscFListDuplicate(((PetscObject)win)->qlist,&((PetscObject)(*v))->qlist);CHKERRQ(ierr);
  /* local-to-global mappings are shared by reference, like the scatter */
  if (win->mapping) {
    ierr = PetscObjectReference((PetscObject)win->mapping);CHKERRQ(ierr);
    (*v)->mapping = win->mapping;
  }
  if (win->bmapping) {
    ierr = PetscObjectReference((PetscObject)win->bmapping);CHKERRQ(ierr);
    (*v)->bmapping = win->bmapping;
  }
  (*v)->map.bs    = win->map.bs;
  (*v)->bstash.bs = win->bstash.bs;

  PetscFunctionReturn(0);
}
654 
655 /* ------------------------------------------------------------------------------------------*/
656 #undef __FUNCT__
657 #define __FUNCT__ "VecCreateGhostBlockWithArray"
658 /*@C
659    VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
660    the caller allocates the array space. Indices in the ghost region are based on blocks.
661 
662    Collective on MPI_Comm
663 
664    Input Parameters:
665 +  comm - the MPI communicator to use
666 .  bs - block size
667 .  n - local vector length
668 .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
669 .  nghost - number of local ghost blocks
670 .  ghosts - global indices of ghost blocks (or PETSC_NULL if not needed)
671 -  array - the space to store the vector values (as long as n + nghost*bs)
672 
673    Output Parameter:
674 .  vv - the global vector representation (without ghost points as part of vector)
675 
676    Notes:
677    Use VecGhostGetLocalForm() to access the local, ghosted representation
678    of the vector.
679 
680    n is the local vector size (total local size not the number of blocks) while nghost
681    is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
682    portion is bs*nghost
683 
684    Level: advanced
685 
686    Concepts: vectors^creating ghosted
687    Concepts: vectors^creating with array
688 
689 .seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
690           VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
691           VecCreateGhostWithArray(), VecCreateGhostBlocked()
692 
693 @*/
694 PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostBlockWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
695 {
696   PetscErrorCode ierr;
697   Vec_MPI        *w;
698   PetscScalar    *larray;
699   IS             from,to;
700 
701   PetscFunctionBegin;
702   *vv = 0;
703 
704   if (n == PETSC_DECIDE)      SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
705   if (nghost == PETSC_DECIDE) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
706   if (nghost < 0)             SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
707   ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
708   /* Create global representation */
709   ierr = VecCreate(comm,vv);CHKERRQ(ierr);
710   ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
711   ierr = VecCreate_MPI_Private(*vv,PETSC_FALSE,nghost*bs,array);CHKERRQ(ierr);
712   ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
713   w    = (Vec_MPI *)(*vv)->data;
714   /* Create local representation */
715   ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
716   ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,n+bs*nghost,larray,&w->localrep);CHKERRQ(ierr);
717   ierr = VecSetBlockSize(w->localrep,bs);CHKERRQ(ierr);
718   ierr = PetscLogObjectParent(*vv,w->localrep);CHKERRQ(ierr);
719   ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);
720 
721   /*
722        Create scatter context for scattering (updating) ghost values
723   */
724   ierr = ISCreateBlock(comm,bs,nghost,ghosts,&from);CHKERRQ(ierr);
725   ierr = ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);CHKERRQ(ierr);
726   ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
727   ierr = PetscLogObjectParent(*vv,w->localupdate);CHKERRQ(ierr);
728   ierr = ISDestroy(to);CHKERRQ(ierr);
729   ierr = ISDestroy(from);CHKERRQ(ierr);
730 
731   PetscFunctionReturn(0);
732 }
733 
734 #undef __FUNCT__
735 #define __FUNCT__ "VecCreateGhostBlock"
736 /*@
737    VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
        The indexing of the ghost points is done with blocks.
739 
740    Collective on MPI_Comm
741 
742    Input Parameters:
743 +  comm - the MPI communicator to use
744 .  bs - the block size
745 .  n - local vector length
746 .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
747 .  nghost - number of local ghost blocks
748 -  ghosts - global indices of ghost blocks
749 
750    Output Parameter:
751 .  vv - the global vector representation (without ghost points as part of vector)
752 
753    Notes:
754    Use VecGhostGetLocalForm() to access the local, ghosted representation
755    of the vector.
756 
757    n is the local vector size (total local size not the number of blocks) while nghost
758    is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
759    portion is bs*nghost
760 
761    Level: advanced
762 
763    Concepts: vectors^ghosted
764 
765 .seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
766           VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
767           VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecCreateGhostBlockWithArray()
768 
769 @*/
PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostBlock(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* array = 0 makes VecCreateGhostBlockWithArray() allocate the storage itself */
  ierr = VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
778 
779 /*
780     These introduce a ghosted vector where the ghosting is determined by the call to
781   VecSetLocalToGlobalMapping()
782 */
783 
784 #undef __FUNCT__
785 #define __FUNCT__ "VecSetLocalToGlobalMapping_FETI"
/* FETI variant: deduce the ghost count from the supplied mapping, enlarge the
   vector's storage to cover owned + ghost entries, and build the local
   (ghosted) sequential representation on top of it. */
PetscErrorCode VecSetLocalToGlobalMapping_FETI(Vec vv,ISLocalToGlobalMapping map)
{
  PetscErrorCode ierr;
  Vec_MPI        *v = (Vec_MPI *)vv->data;

  PetscFunctionBegin;
  /* the mapping covers owned plus ghost entries, so the ghost count is the excess over the local size */
  v->nghost = map->n - vv->map.n;

  /* we need to make longer the array space that was allocated when the vector was created */
  /* NOTE(review): the previous contents of the vector are discarded here, and a
     pre-existing localrep would be overwritten without being destroyed -- looks
     like this is only meant to run once, right after creation; confirm callers */
  ierr     = PetscFree(v->array_allocated);CHKERRQ(ierr);
  ierr     = PetscMalloc(map->n*sizeof(PetscScalar),&v->array_allocated);CHKERRQ(ierr);
  v->array = v->array_allocated;

  /* Create local representation */
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,map->n,v->array,&v->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(vv,v->localrep);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
804 
805 
806 #undef __FUNCT__
807 #define __FUNCT__ "VecSetValuesLocal_FETI"
808 PetscErrorCode VecSetValuesLocal_FETI(Vec vv,PetscInt n,const PetscInt *ix,const PetscScalar *values,InsertMode mode)
809 {
810   PetscErrorCode ierr;
811   Vec_MPI        *v = (Vec_MPI *)vv->data;
812 
813   PetscFunctionBegin;
814   ierr = VecSetValues(v->localrep,n,ix,values,mode);CHKERRQ(ierr);
815   PetscFunctionReturn(0);
816 }
817 
818 EXTERN_C_BEGIN
819 #undef __FUNCT__
820 #define __FUNCT__ "VecCreate_FETI"
821 PetscErrorCode PETSCVEC_DLLEXPORT VecCreate_FETI(Vec vv)
822 {
823   PetscErrorCode ierr;
824 
825   PetscFunctionBegin;
826   ierr = VecSetType(vv,VECMPI);CHKERRQ(ierr);
827 
828   /* overwrite the functions to handle setting values locally */
829   vv->ops->setlocaltoglobalmapping = VecSetLocalToGlobalMapping_FETI;
830   vv->ops->setvalueslocal          = VecSetValuesLocal_FETI;
831   vv->ops->assemblybegin           = 0;
832   vv->ops->assemblyend             = 0;
833   vv->ops->setvaluesblocked        = 0;
834   vv->ops->setvaluesblocked        = 0;
835 
836   PetscFunctionReturn(0);
837 }
838 EXTERN_C_END
839 
840 
841 
842 
843 
844 
845 
846 
847 
848