xref: /petsc/src/vec/vec/impls/mpi/pbvec.c (revision 6805f65bba57675c6478fc5d0aad24746736e0fa)
1 #define PETSCVEC_DLL
2 /*
3    This file contains routines for Parallel vector operations.
4  */
5 #include "src/vec/vec/impls/mpi/pvecimpl.h"   /*I  "petscvec.h"   I*/
6 
7 /*
8        Note this code is very similar to VecPublish_Seq()
9 */
#undef __FUNCT__
#define __FUNCT__ "VecPublish_MPI"
/*
   VecPublish_MPI - No-op publish hook installed into v->bops->publish by
   VecCreate_MPI_Private(); kept as a placeholder (see the note above about
   its similarity to VecPublish_Seq()).
*/
static PetscErrorCode VecPublish_MPI(PetscObject obj)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
17 
18 #undef __FUNCT__
19 #define __FUNCT__ "VecDot_MPI"
20 PetscErrorCode VecDot_MPI(Vec xin,Vec yin,PetscScalar *z)
21 {
22   PetscScalar    sum,work;
23   PetscErrorCode ierr;
24 
25   PetscFunctionBegin;
26   ierr = VecDot_Seq(xin,yin,&work);CHKERRQ(ierr);
27   ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,PetscSum_Op,xin->comm);CHKERRQ(ierr);
28   *z = sum;
29   PetscFunctionReturn(0);
30 }
31 
32 #undef __FUNCT__
33 #define __FUNCT__ "VecTDot_MPI"
34 PetscErrorCode VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z)
35 {
36   PetscScalar    sum,work;
37   PetscErrorCode ierr;
38 
39   PetscFunctionBegin;
40   ierr = VecTDot_Seq(xin,yin,&work);CHKERRQ(ierr);
41   ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,PetscSum_Op,xin->comm);CHKERRQ(ierr);
42   *z   = sum;
43   PetscFunctionReturn(0);
44 }
45 
46 #undef __FUNCT__
47 #define __FUNCT__ "VecSetOption_MPI"
48 PetscErrorCode VecSetOption_MPI(Vec v,VecOption op)
49 {
50   PetscFunctionBegin;
51   if (op == VEC_IGNORE_OFF_PROC_ENTRIES) {
52     v->stash.donotstash = PETSC_TRUE;
53   } else if (op == VEC_TREAT_OFF_PROC_ENTRIES) {
54     v->stash.donotstash = PETSC_FALSE;
55   }
56   PetscFunctionReturn(0);
57 }
58 
59 EXTERN PetscErrorCode VecDuplicate_MPI(Vec,Vec *);
60 EXTERN_C_BEGIN
61 EXTERN PetscErrorCode VecView_MPI_Draw(Vec,PetscViewer);
62 EXTERN_C_END
63 
64 #undef __FUNCT__
65 #define __FUNCT__ "VecPlaceArray_MPI"
66 PetscErrorCode VecPlaceArray_MPI(Vec vin,const PetscScalar *a)
67 {
68   PetscErrorCode ierr;
69   Vec_MPI        *v = (Vec_MPI *)vin->data;
70 
71   PetscFunctionBegin;
72   if (v->unplacedarray) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
73   v->unplacedarray = v->array;  /* save previous array so reset can bring it back */
74   v->array = (PetscScalar *)a;
75   if (v->localrep) {
76     ierr = VecPlaceArray(v->localrep,a);CHKERRQ(ierr);
77   }
78   PetscFunctionReturn(0);
79 }
80 
81 EXTERN PetscErrorCode VecLoad_Binary(PetscViewer, VecType, Vec*);
82 EXTERN PetscErrorCode VecGetValues_MPI(Vec,PetscInt,const PetscInt [],PetscScalar []);
83 
/*
   DvOps - Operation table for the VECMPI vector type, copied into v->ops
   by VecCreate_MPI_Private(). Operations that need communication (dot
   products, norms, max/min, assembly, view, set-values stashing) use the
   _MPI variants; purely local operations reuse the _Seq kernels, which
   act on the on-process portion of the data. Slots set to 0 are not
   implemented for this type; the numeric comments count table positions.
*/
static struct _VecOps DvOps = { VecDuplicate_MPI, /* 1 */
            VecDuplicateVecs_Default,
            VecDestroyVecs_Default,
            VecDot_MPI,
            VecMDot_MPI,
            VecNorm_MPI,
            VecTDot_MPI,
            VecMTDot_MPI,
            VecScale_Seq,
            VecCopy_Seq, /* 10 */
            VecSet_Seq,
            VecSwap_Seq,
            VecAXPY_Seq,
            VecAXPBY_Seq,
            VecMAXPY_Seq,
            VecAYPX_Seq,
            VecWAXPY_Seq,
            VecPointwiseMult_Seq,
            VecPointwiseDivide_Seq,
            VecSetValues_MPI, /* 20 */
            VecAssemblyBegin_MPI,
            VecAssemblyEnd_MPI,
            VecGetArray_Seq,
            VecGetSize_MPI,
            VecGetSize_Seq,
            VecRestoreArray_Seq,
            VecMax_MPI,
            VecMin_MPI,
            VecSetRandom_Seq,
            VecSetOption_MPI,
            VecSetValuesBlocked_MPI,
            VecDestroy_MPI,
            VecView_MPI,
            VecPlaceArray_MPI,
            VecReplaceArray_Seq,
            VecDot_Seq,
            VecTDot_Seq,
            VecNorm_Seq,
            VecMDot_Seq,
            VecMTDot_Seq,
            VecLoadIntoVector_Default,
            VecReciprocal_Default,
            0, /* VecViewNative... */
            VecConjugate_Seq,
            0,
            0,
            VecResetArray_Seq,
            0,
            VecMaxPointwiseDivide_Seq,
            VecLoad_Binary,
            VecPointwiseMax_Seq,
            VecPointwiseMaxAbs_Seq,
            VecPointwiseMin_Seq,
            VecGetValues_MPI};
138 
#undef __FUNCT__
#define __FUNCT__ "VecCreate_MPI_Private"
/*
    VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
    VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
    VecDuplicate_MPI(), VecCreateGhostWithArray(), VecDuplicate_MPI(), and VecDuplicate_Shared()

    nghost - number of extra (ghost) entries appended after the v->map.n owned entries
    array  - optional user-supplied storage, at least v->map.n+nghost scalars long;
             when 0/NULL the storage is allocated here and owned by the vector
*/
PetscErrorCode VecCreate_MPI_Private(Vec v,PetscInt nghost,const PetscScalar array[])
{
  Vec_MPI        *s;
  PetscErrorCode ierr;

  PetscFunctionBegin;

  v->bops->publish   = VecPublish_MPI;
  ierr = PetscLogObjectMemory(v,sizeof(Vec_MPI) + (v->map.n+nghost+1)*sizeof(PetscScalar));CHKERRQ(ierr);
  ierr           = PetscNew(Vec_MPI,&s);CHKERRQ(ierr);
  /* install the VECMPI operation table */
  ierr           = PetscMemcpy(v->ops,&DvOps,sizeof(DvOps));CHKERRQ(ierr);
  v->data        = (void*)s;
  s->nghost      = nghost;
  v->mapping     = 0;
  v->bmapping    = 0;
  v->petscnative = PETSC_TRUE;

  /* block size defaults to 1 when never set by the caller */
  if (v->map.bs == -1) v->map.bs = 1;
  ierr = PetscMapInitialize(v->comm,&v->map);CHKERRQ(ierr);
  if (array) {
    /* user-owned storage: record it but do not take ownership (array_allocated = 0) */
    s->array           = (PetscScalar *)array;
    s->array_allocated = 0;
  } else {
    /* vector-owned storage; note only the owned (non-ghost) part is zeroed */
    PetscInt n         = v->map.n+nghost;
    ierr               = PetscMalloc(n*sizeof(PetscScalar),&s->array);CHKERRQ(ierr);
    s->array_allocated = s->array;
    ierr               = PetscMemzero(s->array,v->map.n*sizeof(PetscScalar));CHKERRQ(ierr);
  }

  /* By default parallel vectors do not have local representation */
  s->localrep    = 0;
  s->localupdate = 0;

  v->stash.insertmode  = NOT_SET_VALUES;
  /* create the stashes. The block-size for bstash is set later when
     VecSetValuesBlocked is called.
  */
  ierr = VecStashCreate_Private(v->comm,1,&v->stash);CHKERRQ(ierr);
  ierr = VecStashCreate_Private(v->comm,v->map.bs,&v->bstash);CHKERRQ(ierr);

#if defined(PETSC_HAVE_MATLAB_ENGINE)
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEnginePut_C","VecMatlabEnginePut_Default",VecMatlabEnginePut_Default);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEngineGet_C","VecMatlabEngineGet_Default",VecMatlabEngineGet_Default);CHKERRQ(ierr);
#endif
  ierr = PetscObjectChangeTypeName((PetscObject)v,VECMPI);CHKERRQ(ierr);
  ierr = PetscPublishAll(v);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
194 
195 /*MC
196    VECMPI - VECMPI = "mpi" - The basic parallel vector
197 
198    Options Database Keys:
199 . -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions()
200 
201   Level: beginner
202 
.seealso: VecCreate(), VecSetType(), VecSetFromOptions(), VecCreateMPIWithArray(), VECMPI, VecType, VecCreateMPI()
204 M*/
205 
206 EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "VecCreate_MPI"
/*
   VecCreate_MPI - Registered constructor for the VECMPI type: creates a
   standard parallel vector with no ghost points and storage allocated by
   VecCreate_MPI_Private() (both trailing arguments 0).
*/
PetscErrorCode PETSCVEC_DLLEXPORT VecCreate_MPI(Vec vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreate_MPI_Private(vv,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
217 EXTERN_C_END
218 
219 #undef __FUNCT__
220 #define __FUNCT__ "VecCreateMPIWithArray"
221 /*@C
222    VecCreateMPIWithArray - Creates a parallel, array-style vector,
223    where the user provides the array space to store the vector values.
224 
225    Collective on MPI_Comm
226 
227    Input Parameters:
228 +  comm  - the MPI communicator to use
229 .  n     - local vector length, cannot be PETSC_DECIDE
230 .  N     - global vector length (or PETSC_DECIDE to have calculated)
231 -  array - the user provided array to store the vector values
232 
233    Output Parameter:
234 .  vv - the vector
235 
236    Notes:
237    Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
238    same type as an existing vector.
239 
240    If the user-provided array is PETSC_NULL, then VecPlaceArray() can be used
241    at a later stage to SET the array for storing the vector values.
242 
243    PETSc does NOT free the array when the vector is destroyed via VecDestroy().
244    The user should not free the array until the vector is destroyed.
245 
246    Level: intermediate
247 
248    Concepts: vectors^creating with array
249 
250 .seealso: VecCreateSeqWithArray(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(),
251           VecCreateMPI(), VecCreateGhostWithArray(), VecPlaceArray()
252 
253 @*/
254 PetscErrorCode PETSCVEC_DLLEXPORT VecCreateMPIWithArray(MPI_Comm comm,PetscInt n,PetscInt N,const PetscScalar array[],Vec *vv)
255 {
256   PetscErrorCode ierr;
257 
258   PetscFunctionBegin;
259   if (n == PETSC_DECIDE) {
260     SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
261   }
262   ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
263   ierr = VecCreate(comm,vv);CHKERRQ(ierr);
264   ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
265   ierr = VecCreate_MPI_Private(*vv,0,array);CHKERRQ(ierr);
266   PetscFunctionReturn(0);
267 }
268 
269 #undef __FUNCT__
270 #define __FUNCT__ "VecGhostGetLocalForm"
271 /*@
272     VecGhostGetLocalForm - Obtains the local ghosted representation of
273     a parallel vector created with VecCreateGhost().
274 
275     Not Collective
276 
277     Input Parameter:
278 .   g - the global vector. Vector must be have been obtained with either
279         VecCreateGhost(), VecCreateGhostWithArray() or VecCreateSeq().
280 
281     Output Parameter:
282 .   l - the local (ghosted) representation
283 
284     Notes:
285     This routine does not actually update the ghost values, but rather it
286     returns a sequential vector that includes the locations for the ghost
287     values and their current values. The returned vector and the original
288     vector passed in share the same array that contains the actual vector data.
289 
290     One should call VecGhostRestoreLocalForm() or VecDestroy() once one is
291     finished using the object.
292 
293     Level: advanced
294 
295    Concepts: vectors^ghost point access
296 
297 .seealso: VecCreateGhost(), VecGhostRestoreLocalForm(), VecCreateGhostWithArray()
298 
299 @*/
PetscErrorCode PETSCVEC_DLLEXPORT VecGhostGetLocalForm(Vec g,Vec *l)
{
  PetscErrorCode ierr;
  PetscTruth     isseq,ismpi;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(g,VEC_COOKIE,1);
  PetscValidPointer(l,2);

  ierr = PetscTypeCompare((PetscObject)g,VECSEQ,&isseq);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)g,VECMPI,&ismpi);CHKERRQ(ierr);
  if (ismpi) {
    /* MPI vectors must have been created ghosted to have a local form */
    Vec_MPI *v  = (Vec_MPI*)g->data;
    if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
    *l = v->localrep;
  } else if (isseq) {
    /* a sequential vector is its own local form */
    *l = g;
  } else {
    SETERRQ1(PETSC_ERR_ARG_WRONG,"Vector type %s does not have local representation",g->type_name);
  }
  /* reference is released by VecGhostRestoreLocalForm() or VecDestroy() */
  ierr = PetscObjectReference((PetscObject)*l);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
323 
324 #undef __FUNCT__
325 #define __FUNCT__ "VecGhostRestoreLocalForm"
326 /*@
327     VecGhostRestoreLocalForm - Restores the local ghosted representation of
328     a parallel vector obtained with VecGhostGetLocalForm().
329 
330     Not Collective
331 
332     Input Parameter:
333 +   g - the global vector
334 -   l - the local (ghosted) representation
335 
336     Notes:
337     This routine does not actually update the ghost values, but rather it
338     returns a sequential vector that includes the locations for the ghost values
339     and their current values.
340 
341     Level: advanced
342 
343 .seealso: VecCreateGhost(), VecGhostGetLocalForm(), VecCreateGhostWithArray()
344 @*/
345 PetscErrorCode PETSCVEC_DLLEXPORT VecGhostRestoreLocalForm(Vec g,Vec *l)
346 {
347   PetscFunctionBegin;
348   PetscObjectDereference((PetscObject)*l);
349   PetscFunctionReturn(0);
350 }
351 
352 #undef __FUNCT__
353 #define __FUNCT__ "VecGhostUpdateBegin"
354 /*@
355    VecGhostUpdateBegin - Begins the vector scatter to update the vector from
356    local representation to global or global representation to local.
357 
358    Collective on Vec
359 
360    Input Parameters:
361 +  g - the vector (obtained with VecCreateGhost() or VecDuplicate())
362 .  insertmode - one of ADD_VALUES or INSERT_VALUES
363 -  scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE
364 
365    Notes:
366    Use the following to update the ghost regions with correct values from the owning process
367 .vb
368        VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
369        VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
370 .ve
371 
372    Use the following to accumulate the ghost region values onto the owning processors
373 .vb
374        VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
375        VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
376 .ve
377 
378    To accumulate the ghost region values onto the owning processors and then update
379    the ghost regions correctly, call the later followed by the former, i.e.,
380 .vb
381        VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
382        VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
383        VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
384        VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
385 .ve
386 
387    Level: advanced
388 
389 .seealso: VecCreateGhost(), VecGhostUpdateEnd(), VecGhostGetLocalForm(),
390           VecGhostRestoreLocalForm(),VecCreateGhostWithArray()
391 
392 @*/
PetscErrorCode PETSCVEC_DLLEXPORT VecGhostUpdateBegin(Vec g,InsertMode insertmode,ScatterMode scattermode)
{
  Vec_MPI        *v;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(g,VEC_COOKIE,1);

  v  = (Vec_MPI*)g->data;
  if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
  /* no scatter context means nothing to communicate (e.g. zero ghost points) */
  if (!v->localupdate) PetscFunctionReturn(0);

  if (scattermode == SCATTER_REVERSE) {
    /* send ghost-region values back to the owning processes */
    ierr = VecScatterBegin(v->localrep,g,insertmode,scattermode,v->localupdate);CHKERRQ(ierr);
  } else {
    /* pull values from the owning processes into the local ghost regions */
    ierr = VecScatterBegin(g,v->localrep,insertmode,scattermode,v->localupdate);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
412 
413 #undef __FUNCT__
414 #define __FUNCT__ "VecGhostUpdateEnd"
415 /*@
416    VecGhostUpdateEnd - End the vector scatter to update the vector from
417    local representation to global or global representation to local.
418 
419    Collective on Vec
420 
421    Input Parameters:
422 +  g - the vector (obtained with VecCreateGhost() or VecDuplicate())
423 .  insertmode - one of ADD_VALUES or INSERT_VALUES
424 -  scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE
425 
426    Notes:
427 
428    Use the following to update the ghost regions with correct values from the owning process
429 .vb
430        VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
431        VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
432 .ve
433 
434    Use the following to accumulate the ghost region values onto the owning processors
435 .vb
436        VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
437        VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
438 .ve
439 
440    To accumulate the ghost region values onto the owning processors and then update
441    the ghost regions correctly, call the later followed by the former, i.e.,
442 .vb
443        VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
444        VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
445        VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
446        VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
447 .ve
448 
449    Level: advanced
450 
451 .seealso: VecCreateGhost(), VecGhostUpdateBegin(), VecGhostGetLocalForm(),
452           VecGhostRestoreLocalForm(),VecCreateGhostWithArray()
453 
454 @*/
PetscErrorCode PETSCVEC_DLLEXPORT VecGhostUpdateEnd(Vec g,InsertMode insertmode,ScatterMode scattermode)
{
  Vec_MPI        *v;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(g,VEC_COOKIE,1);

  v  = (Vec_MPI*)g->data;
  if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
  /* no scatter context means nothing to communicate (must mirror VecGhostUpdateBegin) */
  if (!v->localupdate) PetscFunctionReturn(0);

  if (scattermode == SCATTER_REVERSE) {
    /* complete the ghost-to-owner accumulation started in VecGhostUpdateBegin() */
    ierr = VecScatterEnd(v->localrep,g,insertmode,scattermode,v->localupdate);CHKERRQ(ierr);
  } else {
    /* complete the owner-to-ghost update started in VecGhostUpdateBegin() */
    ierr = VecScatterEnd(g,v->localrep,insertmode,scattermode,v->localupdate);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
474 
475 #undef __FUNCT__
476 #define __FUNCT__ "VecCreateGhostWithArray"
477 /*@C
478    VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
479    the caller allocates the array space.
480 
481    Collective on MPI_Comm
482 
483    Input Parameters:
484 +  comm - the MPI communicator to use
485 .  n - local vector length
486 .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
487 .  nghost - number of local ghost points
488 .  ghosts - global indices of ghost points (or PETSC_NULL if not needed)
489 -  array - the space to store the vector values (as long as n + nghost)
490 
491    Output Parameter:
492 .  vv - the global vector representation (without ghost points as part of vector)
493 
494    Notes:
495    Use VecGhostGetLocalForm() to access the local, ghosted representation
496    of the vector.
497 
498    This also automatically sets the ISLocalToGlobalMapping() for this vector.
499 
500    Level: advanced
501 
502    Concepts: vectors^creating with array
503    Concepts: vectors^ghosted
504 
505 .seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
506           VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
507           VecCreateGhostBlock(), VecCreateGhostBlockWithArray()
508 
509 @*/
510 PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostWithArray(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
511 {
512   PetscErrorCode         ierr;
513   Vec_MPI                *w;
514   PetscScalar            *larray;
515   IS                     from,to;
516   ISLocalToGlobalMapping ltog;
517   PetscInt               rstart,i,*indices;
518 
519   PetscFunctionBegin;
520   *vv = 0;
521 
522   if (n == PETSC_DECIDE)      SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
523   if (nghost == PETSC_DECIDE) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
524   if (nghost < 0)             SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
525   ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
526   /* Create global representation */
527   ierr = VecCreate(comm,vv);CHKERRQ(ierr);
528   ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
529   ierr = VecCreate_MPI_Private(*vv,nghost,array);CHKERRQ(ierr);
530   w    = (Vec_MPI *)(*vv)->data;
531   /* Create local representation */
532   ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
533   ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
534   ierr = PetscLogObjectParent(*vv,w->localrep);CHKERRQ(ierr);
535   ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);
536 
537   /*
538        Create scatter context for scattering (updating) ghost values
539   */
540   ierr = ISCreateGeneral(comm,nghost,ghosts,&from);CHKERRQ(ierr);
541   ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
542   ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
543   ierr = PetscLogObjectParent(*vv,w->localupdate);CHKERRQ(ierr);
544   ierr = ISDestroy(to);CHKERRQ(ierr);
545   ierr = ISDestroy(from);CHKERRQ(ierr);
546 
547   /* set local to global mapping for ghosted vector */
548   ierr = PetscMalloc((n+nghost)*sizeof(PetscInt),&indices);CHKERRQ(ierr);
549   ierr = VecGetOwnershipRange(*vv,&rstart,PETSC_NULL);CHKERRQ(ierr);
550   for (i=0; i<n; i++) {
551     indices[i] = rstart + i;
552   }
553   for (i=0; i<nghost; i++) {
554     indices[n+i] = ghosts[i];
555   }
556   ierr = ISLocalToGlobalMappingCreate(comm,n+nghost,indices,&ltog);CHKERRQ(ierr);
557   ierr = PetscFree(indices);CHKERRQ(ierr);
558   ierr = VecSetLocalToGlobalMapping(*vv,ltog);CHKERRQ(ierr);
559   ierr = ISLocalToGlobalMappingDestroy(ltog);CHKERRQ(ierr);
560   ierr = PetscFree(indices);CHKERRQ(ierr);
561   PetscFunctionReturn(0);
562 }
563 
564 #undef __FUNCT__
565 #define __FUNCT__ "VecCreateGhost"
566 /*@
567    VecCreateGhost - Creates a parallel vector with ghost padding on each processor.
568 
569    Collective on MPI_Comm
570 
571    Input Parameters:
572 +  comm - the MPI communicator to use
573 .  n - local vector length
574 .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
575 .  nghost - number of local ghost points
576 -  ghosts - global indices of ghost points
577 
578    Output Parameter:
579 .  vv - the global vector representation (without ghost points as part of vector)
580 
581    Notes:
582    Use VecGhostGetLocalForm() to access the local, ghosted representation
583    of the vector.
584 
585    This also automatically sets the ISLocalToGlobalMapping() for this vector.
586 
587    Level: advanced
588 
589    Concepts: vectors^ghosted
590 
591 .seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
592           VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
593           VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
594           VecCreateGhostBlock(), VecCreateGhostBlockWithArray()
595 
596 @*/
PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhost(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* convenience wrapper: array argument 0 means PETSc allocates the storage */
  ierr = VecCreateGhostWithArray(comm,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
605 
#undef __FUNCT__
#define __FUNCT__ "VecDuplicate_MPI"
/*
   VecDuplicate_MPI - Creates a new parallel vector with the same layout,
   ghosting, operation table, stashing behavior and mappings as win; the
   numerical values are NOT copied.
*/
PetscErrorCode VecDuplicate_MPI(Vec win,Vec *v)
{
  PetscErrorCode ierr;
  Vec_MPI        *vw,*w = (Vec_MPI *)win->data;
  PetscScalar    *array;

  PetscFunctionBegin;
  ierr = VecCreate(win->comm,v);CHKERRQ(ierr);
  ierr = VecSetSizes(*v,win->map.n,win->map.N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*v,w->nghost,0);CHKERRQ(ierr);
  vw   = (Vec_MPI *)(*v)->data;
  /* copy win's (possibly overridden) operation table, not the default one */
  ierr = PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps));CHKERRQ(ierr);

  /* save local representation of the parallel vector (and scatter) if it exists */
  if (w->localrep) {
    /* the new local rep shares the new vector's storage, like the original's does */
    ierr = VecGetArray(*v,&array);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,win->map.n+w->nghost,array,&vw->localrep);CHKERRQ(ierr);
    ierr = PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps));CHKERRQ(ierr);
    ierr = VecRestoreArray(*v,&array);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(*v,vw->localrep);CHKERRQ(ierr);
    /* the scatter context is shared (reference counted), not duplicated */
    vw->localupdate = w->localupdate;
    if (vw->localupdate) {
      ierr = PetscObjectReference((PetscObject)vw->localupdate);CHKERRQ(ierr);
    }
  }

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash = win->stash.donotstash;

  /* duplicate attached objects and composed function lists */
  ierr = PetscOListDuplicate(win->olist,&(*v)->olist);CHKERRQ(ierr);
  ierr = PetscFListDuplicate(win->qlist,&(*v)->qlist);CHKERRQ(ierr);
  if (win->mapping) {
    /* mappings are shared by reference, not copied */
    (*v)->mapping = win->mapping;
    ierr = PetscObjectReference((PetscObject)win->mapping);CHKERRQ(ierr);
  }
  if (win->bmapping) {
    (*v)->bmapping = win->bmapping;
    ierr = PetscObjectReference((PetscObject)win->bmapping);CHKERRQ(ierr);
  }
  (*v)->map.bs        = win->map.bs;
  (*v)->bstash.bs = win->bstash.bs;

  PetscFunctionReturn(0);
}
652 
653 /* ------------------------------------------------------------------------------------------*/
654 #undef __FUNCT__
655 #define __FUNCT__ "VecCreateGhostBlockWithArray"
656 /*@C
657    VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
658    the caller allocates the array space. Indices in the ghost region are based on blocks.
659 
660    Collective on MPI_Comm
661 
662    Input Parameters:
663 +  comm - the MPI communicator to use
664 .  bs - block size
665 .  n - local vector length
666 .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
667 .  nghost - number of local ghost blocks
668 .  ghosts - global indices of ghost blocks (or PETSC_NULL if not needed)
669 -  array - the space to store the vector values (as long as n + nghost*bs)
670 
671    Output Parameter:
672 .  vv - the global vector representation (without ghost points as part of vector)
673 
674    Notes:
675    Use VecGhostGetLocalForm() to access the local, ghosted representation
676    of the vector.
677 
678    n is the local vector size (total local size not the number of blocks) while nghost
679    is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
680    portion is bs*nghost
681 
682    Level: advanced
683 
684    Concepts: vectors^creating ghosted
685    Concepts: vectors^creating with array
686 
687 .seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
688           VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
689           VecCreateGhostWithArray(), VecCreateGhostBlocked()
690 
691 @*/
PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostBlockWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode ierr;
  Vec_MPI        *w;
  PetscScalar    *larray;
  IS             from,to;

  PetscFunctionBegin;
  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation; nghost counts blocks, so the ghost
     storage holds nghost*bs scalars */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,nghost*bs,array);CHKERRQ(ierr);
  ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
  w    = (Vec_MPI *)(*vv)->data;
  /* Create local representation sharing the parallel vector's storage */
  ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,n+bs*nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = VecSetBlockSize(w->localrep,bs);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localrep);CHKERRQ(ierr);
  ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values;
       ghosts holds block indices, which the block IS expands to scalars
  */
  ierr = ISCreateBlock(comm,bs,nghost,ghosts,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localupdate);CHKERRQ(ierr);
  ierr = ISDestroy(to);CHKERRQ(ierr);
  ierr = ISDestroy(from);CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
731 
732 #undef __FUNCT__
733 #define __FUNCT__ "VecCreateGhostBlock"
734 /*@
735    VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
736         The indicing of the ghost points is done with blocks.
737 
738    Collective on MPI_Comm
739 
740    Input Parameters:
741 +  comm - the MPI communicator to use
742 .  bs - the block size
743 .  n - local vector length
744 .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
745 .  nghost - number of local ghost blocks
746 -  ghosts - global indices of ghost blocks
747 
748    Output Parameter:
749 .  vv - the global vector representation (without ghost points as part of vector)
750 
751    Notes:
752    Use VecGhostGetLocalForm() to access the local, ghosted representation
753    of the vector.
754 
755    n is the local vector size (total local size not the number of blocks) while nghost
756    is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
757    portion is bs*nghost
758 
759    Level: advanced
760 
761    Concepts: vectors^ghosted
762 
763 .seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
764           VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
765           VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecCreateGhostBlockWithArray()
766 
767 @*/
PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostBlock(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* convenience wrapper: array argument 0 means PETSc allocates the storage */
  ierr = VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
776 
777 /*
778     These introduce a ghosted vector where the ghosting is determined by the call to
779   VecSetLocalToGlobalMapping()
780 */
781 
#undef __FUNCT__
#define __FUNCT__ "VecSetLocalToGlobalMapping_FETI"
/*
   VecSetLocalToGlobalMapping_FETI - Turns an ordinary VECMPI vector into a
   ghosted one: the ghost count is deduced from the mapping length and the
   data array is reallocated to hold map->n entries.

   NOTE(review): the old array is freed and replaced without copying, so any
   values already in the vector are lost -- presumably this is only called
   immediately after creation; confirm with callers. It also assumes
   array_allocated is the active storage (no VecPlaceArray() in effect) and
   that no localrep exists yet -- an existing one would be leaked.
*/
PetscErrorCode VecSetLocalToGlobalMapping_FETI(Vec vv,ISLocalToGlobalMapping map)
{
  PetscErrorCode ierr;
  Vec_MPI        *v = (Vec_MPI *)vv->data;

  PetscFunctionBegin;
  /* ghost entries are the mapping entries beyond the owned local size */
  v->nghost = map->n - vv->map.n;

  /* we need to make longer the array space that was allocated when the vector was created */
  ierr     = PetscFree(v->array_allocated);CHKERRQ(ierr);
  ierr     = PetscMalloc(map->n*sizeof(PetscScalar),&v->array_allocated);CHKERRQ(ierr);
  v->array = v->array_allocated;

  /* Create local representation */
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,map->n,v->array,&v->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(vv,v->localrep);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
802 
803 
804 #undef __FUNCT__
805 #define __FUNCT__ "VecSetValuesLocal_FETI"
806 PetscErrorCode VecSetValuesLocal_FETI(Vec vv,PetscInt n,const PetscInt *ix,const PetscScalar *values,InsertMode mode)
807 {
808   PetscErrorCode ierr;
809   Vec_MPI        *v = (Vec_MPI *)vv->data;
810 
811   PetscFunctionBegin;
812   ierr = VecSetValues(v->localrep,n,ix,values,mode);CHKERRQ(ierr);
813   PetscFunctionReturn(0);
814 }
815 
816 EXTERN_C_BEGIN
817 #undef __FUNCT__
818 #define __FUNCT__ "VecCreate_FETI"
819 PetscErrorCode PETSCVEC_DLLEXPORT VecCreate_FETI(Vec vv)
820 {
821   PetscErrorCode ierr;
822 
823   PetscFunctionBegin;
824   ierr = VecSetType(vv,VECMPI);CHKERRQ(ierr);
825 
826   /* overwrite the functions to handle setting values locally */
827   vv->ops->setlocaltoglobalmapping = VecSetLocalToGlobalMapping_FETI;
828   vv->ops->setvalueslocal          = VecSetValuesLocal_FETI;
829   vv->ops->assemblybegin           = 0;
830   vv->ops->assemblyend             = 0;
831   vv->ops->setvaluesblocked        = 0;
832   vv->ops->setvaluesblocked        = 0;
833 
834   PetscFunctionReturn(0);
835 }
836 EXTERN_C_END
837 
838 
839 
840 
841 
842 
843 
844 
845 
846