xref: /petsc/src/vec/vec/impls/mpi/pbvec.c (revision 9e9a1f8fa1053f1c38c1bea9272846e46b9e1b5e)
1 #define PETSCVEC_DLL
2 /*
3    This file contains routines for Parallel vector operations.
4  */
5 #include "src/vec/vec/impls/mpi/pvecimpl.h"   /*I  "petscvec.h"   I*/
6 
7 #if 0
8 #undef __FUNCT__
9 #define __FUNCT__ "VecPublish_MPI"
static PetscErrorCode VecPublish_MPI(PetscObject obj)
{
  /* Stub kept under "#if 0": publishing of MPI vectors is currently disabled. */
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
15 #endif
16 
17 #undef __FUNCT__
18 #define __FUNCT__ "VecDot_MPI"
19 PetscErrorCode VecDot_MPI(Vec xin,Vec yin,PetscScalar *z)
20 {
21   PetscScalar    sum,work;
22   PetscErrorCode ierr;
23 
24   PetscFunctionBegin;
25   ierr = VecDot_Seq(xin,yin,&work);CHKERRQ(ierr);
26   ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,PetscSum_Op,((PetscObject)xin)->comm);CHKERRQ(ierr);
27   *z = sum;
28   PetscFunctionReturn(0);
29 }
30 
31 #undef __FUNCT__
32 #define __FUNCT__ "VecTDot_MPI"
33 PetscErrorCode VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z)
34 {
35   PetscScalar    sum,work;
36   PetscErrorCode ierr;
37 
38   PetscFunctionBegin;
39   ierr = VecTDot_Seq(xin,yin,&work);CHKERRQ(ierr);
40   ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,PetscSum_Op,((PetscObject)xin)->comm);CHKERRQ(ierr);
41   *z   = sum;
42   PetscFunctionReturn(0);
43 }
44 
45 #undef __FUNCT__
46 #define __FUNCT__ "VecSetOption_MPI"
47 PetscErrorCode VecSetOption_MPI(Vec v,VecOption op,PetscTruth flag)
48 {
49   PetscFunctionBegin;
50   if (op == VEC_IGNORE_OFF_PROC_ENTRIES) {
51     v->stash.donotstash = flag;
52   } else if (op == VEC_IGNORE_NEGATIVE_INDICES) {
53     v->stash.ignorenegidx = flag;
54   }
55   PetscFunctionReturn(0);
56 }
57 
58 EXTERN PetscErrorCode VecDuplicate_MPI(Vec,Vec *);
59 EXTERN_C_BEGIN
60 EXTERN PetscErrorCode VecView_MPI_Draw(Vec,PetscViewer);
61 EXTERN_C_END
62 
63 #undef __FUNCT__
64 #define __FUNCT__ "VecPlaceArray_MPI"
65 PetscErrorCode VecPlaceArray_MPI(Vec vin,const PetscScalar *a)
66 {
67   PetscErrorCode ierr;
68   Vec_MPI        *v = (Vec_MPI *)vin->data;
69 
70   PetscFunctionBegin;
71   if (v->unplacedarray) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
72   v->unplacedarray = v->array;  /* save previous array so reset can bring it back */
73   v->array = (PetscScalar *)a;
74   if (v->localrep) {
75     ierr = VecPlaceArray(v->localrep,a);CHKERRQ(ierr);
76   }
77   PetscFunctionReturn(0);
78 }
79 
80 EXTERN PetscErrorCode VecLoad_Binary(PetscViewer, VecType, Vec*);
81 EXTERN PetscErrorCode VecGetValues_MPI(Vec,PetscInt,const PetscInt [],PetscScalar []);
82 
/* Virtual function table for the VECMPI implementation.  Reductions (dot
   products, norms, max/min) use the *_MPI variants, which combine the
   per-process results over the communicator; purely local operations
   (scale, copy, axpy, ...) reuse the sequential *_Seq kernels directly on
   the local array.  Zero entries mark operations with no implementation. */
static struct _VecOps DvOps = { VecDuplicate_MPI, /* 1 */
            VecDuplicateVecs_Default,
            VecDestroyVecs_Default,
            VecDot_MPI,
            VecMDot_MPI,
            VecNorm_MPI,
            VecTDot_MPI,
            VecMTDot_MPI,
            VecScale_Seq,
            VecCopy_Seq, /* 10 */
            VecSet_Seq,
            VecSwap_Seq,
            VecAXPY_Seq,
            VecAXPBY_Seq,
            VecMAXPY_Seq,
            VecAYPX_Seq,
            VecWAXPY_Seq,
            VecPointwiseMult_Seq,
            VecPointwiseDivide_Seq,
            VecSetValues_MPI, /* 20 */
            VecAssemblyBegin_MPI,
            VecAssemblyEnd_MPI,
            VecGetArray_Seq,
            VecGetSize_MPI,
            VecGetSize_Seq,
            VecRestoreArray_Seq,
            VecMax_MPI,
            VecMin_MPI,
            VecSetRandom_Seq,
            VecSetOption_MPI,
            VecSetValuesBlocked_MPI,
            VecDestroy_MPI,
            VecView_MPI,
            VecPlaceArray_MPI,
            VecReplaceArray_Seq,
            VecDot_Seq,
            VecTDot_Seq,
            VecNorm_Seq,
            VecMDot_Seq,
            VecMTDot_Seq,
            VecLoadIntoVector_Default,
            VecReciprocal_Default,
            0, /* VecViewNative... */
            VecConjugate_Seq,
            0,
            0,
            VecResetArray_Seq,
            0,
            VecMaxPointwiseDivide_Seq,
            VecLoad_Binary,
            VecPointwiseMax_Seq,
            VecPointwiseMaxAbs_Seq,
            VecPointwiseMin_Seq,
            VecGetValues_MPI};
137 
138 #undef __FUNCT__
139 #define __FUNCT__ "VecCreate_MPI_Private"
140 /*
141     VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
142     VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
143     VecCreateGhostWithArray(), VecDuplicate_MPI(), and VecDuplicate_Shared()
144 */
PetscErrorCode VecCreate_MPI_Private(Vec v,PetscInt nghost,const PetscScalar array[])
{
  Vec_MPI        *s;
  PetscErrorCode ierr;

  PetscFunctionBegin;

  ierr           = PetscNewLog(v,Vec_MPI,&s);CHKERRQ(ierr);
  v->data        = (void*)s;
  /* Install the VECMPI function table defined above. */
  ierr           = PetscMemcpy(v->ops,&DvOps,sizeof(DvOps));CHKERRQ(ierr);
  s->nghost      = nghost;
  v->mapping     = 0;
  v->bmapping    = 0;
  v->petscnative = PETSC_TRUE;

  if (v->map.bs == -1) v->map.bs = 1; /* default block size is 1 */
  ierr = PetscMapSetUp(&v->map);CHKERRQ(ierr);
  if (array) {
    /* Caller supplies the storage; it is never freed by the vector
       (array_allocated == 0 marks it as not owned). */
    s->array           = (PetscScalar *)array;
    s->array_allocated = 0;
  } else {
    /* Allocate storage for owned entries plus ghost padding; only the owned
       part is zeroed here — ghost slots are filled later by scatters. */
    PetscInt n         = v->map.n+nghost;
    ierr               = PetscMalloc(n*sizeof(PetscScalar),&s->array);CHKERRQ(ierr);
    ierr               = PetscLogObjectMemory(v,n*sizeof(PetscScalar));CHKERRQ(ierr);
    ierr               = PetscMemzero(s->array,v->map.n*sizeof(PetscScalar));CHKERRQ(ierr);
    s->array_allocated = s->array;
  }

  /* By default parallel vectors do not have local representation */
  s->localrep    = 0;
  s->localupdate = 0;

  v->stash.insertmode  = NOT_SET_VALUES;
  /* create the stashes. The block-size for bstash is set later when
     VecSetValuesBlocked is called.
  */
  ierr = VecStashCreate_Private(((PetscObject)v)->comm,1,&v->stash);CHKERRQ(ierr);
  ierr = VecStashCreate_Private(((PetscObject)v)->comm,v->map.bs,&v->bstash);CHKERRQ(ierr);

#if defined(PETSC_HAVE_MATLAB_ENGINE)
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEnginePut_C","VecMatlabEnginePut_Default",VecMatlabEnginePut_Default);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEngineGet_C","VecMatlabEngineGet_Default",VecMatlabEngineGet_Default);CHKERRQ(ierr);
#endif
  ierr = PetscObjectChangeTypeName((PetscObject)v,VECMPI);CHKERRQ(ierr);
  ierr = PetscPublishAll(v);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
192 
193 /*MC
194    VECMPI - VECMPI = "mpi" - The basic parallel vector
195 
196    Options Database Keys:
197 . -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions()
198 
199   Level: beginner
200 
201 .seealso: VecCreate(), VecSetType(), VecSetFromOptions(), VecCreateMPIWithArray(), VecType, VecCreateMPI()
202 M*/
203 
204 EXTERN_C_BEGIN
205 #undef __FUNCT__
206 #define __FUNCT__ "VecCreate_MPI"
PetscErrorCode PETSCVEC_DLLEXPORT VecCreate_MPI(Vec vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Standard VECMPI: no ghost points, storage allocated internally. */
  ierr = VecCreate_MPI_Private(vv,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
215 EXTERN_C_END
216 
217 #undef __FUNCT__
218 #define __FUNCT__ "VecCreateMPIWithArray"
219 /*@C
220    VecCreateMPIWithArray - Creates a parallel, array-style vector,
221    where the user provides the array space to store the vector values.
222 
223    Collective on MPI_Comm
224 
225    Input Parameters:
226 +  comm  - the MPI communicator to use
227 .  n     - local vector length, cannot be PETSC_DECIDE
228 .  N     - global vector length (or PETSC_DECIDE to have calculated)
229 -  array - the user provided array to store the vector values
230 
231    Output Parameter:
232 .  vv - the vector
233 
234    Notes:
235    Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
236    same type as an existing vector.
237 
238    If the user-provided array is PETSC_NULL, then VecPlaceArray() can be used
239    at a later stage to SET the array for storing the vector values.
240 
241    PETSc does NOT free the array when the vector is destroyed via VecDestroy().
242    The user should not free the array until the vector is destroyed.
243 
244    Level: intermediate
245 
246    Concepts: vectors^creating with array
247 
248 .seealso: VecCreateSeqWithArray(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(),
249           VecCreateMPI(), VecCreateGhostWithArray(), VecPlaceArray()
250 
251 @*/
252 PetscErrorCode PETSCVEC_DLLEXPORT VecCreateMPIWithArray(MPI_Comm comm,PetscInt n,PetscInt N,const PetscScalar array[],Vec *vv)
253 {
254   PetscErrorCode ierr;
255 
256   PetscFunctionBegin;
257   if (n == PETSC_DECIDE) {
258     SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
259   }
260   ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
261   ierr = VecCreate(comm,vv);CHKERRQ(ierr);
262   ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
263   ierr = VecCreate_MPI_Private(*vv,0,array);CHKERRQ(ierr);
264   PetscFunctionReturn(0);
265 }
266 
267 #undef __FUNCT__
268 #define __FUNCT__ "VecGhostGetLocalForm"
269 /*@
270     VecGhostGetLocalForm - Obtains the local ghosted representation of
271     a parallel vector created with VecCreateGhost().
272 
273     Not Collective
274 
275     Input Parameter:
276 .   g - the global vector. Vector must have been obtained with either
277         VecCreateGhost(), VecCreateGhostWithArray() or VecCreateSeq().
278 
279     Output Parameter:
280 .   l - the local (ghosted) representation
281 
282     Notes:
283     This routine does not actually update the ghost values, but rather it
284     returns a sequential vector that includes the locations for the ghost
285     values and their current values. The returned vector and the original
286     vector passed in share the same array that contains the actual vector data.
287 
288     One should call VecGhostRestoreLocalForm() or VecDestroy() once one is
289     finished using the object.
290 
291     Level: advanced
292 
293    Concepts: vectors^ghost point access
294 
295 .seealso: VecCreateGhost(), VecGhostRestoreLocalForm(), VecCreateGhostWithArray()
296 
297 @*/
298 PetscErrorCode PETSCVEC_DLLEXPORT VecGhostGetLocalForm(Vec g,Vec *l)
299 {
300   PetscErrorCode ierr;
301   PetscTruth     isseq,ismpi;
302 
303   PetscFunctionBegin;
304   PetscValidHeaderSpecific(g,VEC_COOKIE,1);
305   PetscValidPointer(l,2);
306 
307   ierr = PetscTypeCompare((PetscObject)g,VECSEQ,&isseq);CHKERRQ(ierr);
308   ierr = PetscTypeCompare((PetscObject)g,VECMPI,&ismpi);CHKERRQ(ierr);
309   if (ismpi) {
310     Vec_MPI *v  = (Vec_MPI*)g->data;
311     if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
312     *l = v->localrep;
313   } else if (isseq) {
314     *l = g;
315   } else {
316     SETERRQ1(PETSC_ERR_ARG_WRONG,"Vector type %s does not have local representation",((PetscObject)g)->type_name);
317   }
318   ierr = PetscObjectReference((PetscObject)*l);CHKERRQ(ierr);
319   PetscFunctionReturn(0);
320 }
321 
322 #undef __FUNCT__
323 #define __FUNCT__ "VecGhostRestoreLocalForm"
324 /*@
325     VecGhostRestoreLocalForm - Restores the local ghosted representation of
326     a parallel vector obtained with VecGhostGetLocalForm().
327 
328     Not Collective
329 
330     Input Parameter:
331 +   g - the global vector
332 -   l - the local (ghosted) representation
333 
334     Notes:
335     This routine does not actually update the ghost values, but rather it
336     returns a sequential vector that includes the locations for the ghost values
337     and their current values.
338 
339     Level: advanced
340 
341 .seealso: VecCreateGhost(), VecGhostGetLocalForm(), VecCreateGhostWithArray()
342 @*/
343 PetscErrorCode PETSCVEC_DLLEXPORT VecGhostRestoreLocalForm(Vec g,Vec *l)
344 {
345   PetscFunctionBegin;
346   PetscObjectDereference((PetscObject)*l);
347   PetscFunctionReturn(0);
348 }
349 
350 #undef __FUNCT__
351 #define __FUNCT__ "VecGhostUpdateBegin"
352 /*@
353    VecGhostUpdateBegin - Begins the vector scatter to update the vector from
354    local representation to global or global representation to local.
355 
356    Collective on Vec
357 
358    Input Parameters:
359 +  g - the vector (obtained with VecCreateGhost() or VecDuplicate())
360 .  insertmode - one of ADD_VALUES or INSERT_VALUES
361 -  scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE
362 
363    Notes:
364    Use the following to update the ghost regions with correct values from the owning process
365 .vb
366        VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
367        VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
368 .ve
369 
370    Use the following to accumulate the ghost region values onto the owning processors
371 .vb
372        VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
373        VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
374 .ve
375 
376    To accumulate the ghost region values onto the owning processors and then update
377    the ghost regions correctly, call the latter followed by the former, i.e.,
378 .vb
379        VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
380        VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
381        VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
382        VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
383 .ve
384 
385    Level: advanced
386 
387 .seealso: VecCreateGhost(), VecGhostUpdateEnd(), VecGhostGetLocalForm(),
388           VecGhostRestoreLocalForm(),VecCreateGhostWithArray()
389 
390 @*/
391 PetscErrorCode PETSCVEC_DLLEXPORT VecGhostUpdateBegin(Vec g,InsertMode insertmode,ScatterMode scattermode)
392 {
393   Vec_MPI        *v;
394   PetscErrorCode ierr;
395 
396   PetscFunctionBegin;
397   PetscValidHeaderSpecific(g,VEC_COOKIE,1);
398 
399   v  = (Vec_MPI*)g->data;
400   if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
401   if (!v->localupdate) PetscFunctionReturn(0);
402 
403   if (scattermode == SCATTER_REVERSE) {
404     ierr = VecScatterBegin(v->localupdate,v->localrep,g,insertmode,scattermode);CHKERRQ(ierr);
405   } else {
406     ierr = VecScatterBegin(v->localupdate,g,v->localrep,insertmode,scattermode);CHKERRQ(ierr);
407   }
408   PetscFunctionReturn(0);
409 }
410 
411 #undef __FUNCT__
412 #define __FUNCT__ "VecGhostUpdateEnd"
413 /*@
414    VecGhostUpdateEnd - End the vector scatter to update the vector from
415    local representation to global or global representation to local.
416 
417    Collective on Vec
418 
419    Input Parameters:
420 +  g - the vector (obtained with VecCreateGhost() or VecDuplicate())
421 .  insertmode - one of ADD_VALUES or INSERT_VALUES
422 -  scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE
423 
424    Notes:
425 
426    Use the following to update the ghost regions with correct values from the owning process
427 .vb
428        VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
429        VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
430 .ve
431 
432    Use the following to accumulate the ghost region values onto the owning processors
433 .vb
434        VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
435        VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
436 .ve
437 
438    To accumulate the ghost region values onto the owning processors and then update
439    the ghost regions correctly, call the latter followed by the former, i.e.,
440 .vb
441        VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
442        VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
443        VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
444        VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
445 .ve
446 
447    Level: advanced
448 
449 .seealso: VecCreateGhost(), VecGhostUpdateBegin(), VecGhostGetLocalForm(),
450           VecGhostRestoreLocalForm(),VecCreateGhostWithArray()
451 
452 @*/
453 PetscErrorCode PETSCVEC_DLLEXPORT VecGhostUpdateEnd(Vec g,InsertMode insertmode,ScatterMode scattermode)
454 {
455   Vec_MPI        *v;
456   PetscErrorCode ierr;
457 
458   PetscFunctionBegin;
459   PetscValidHeaderSpecific(g,VEC_COOKIE,1);
460 
461   v  = (Vec_MPI*)g->data;
462   if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
463   if (!v->localupdate) PetscFunctionReturn(0);
464 
465   if (scattermode == SCATTER_REVERSE) {
466     ierr = VecScatterEnd(v->localupdate,v->localrep,g,insertmode,scattermode);CHKERRQ(ierr);
467   } else {
468     ierr = VecScatterEnd(v->localupdate,g,v->localrep,insertmode,scattermode);CHKERRQ(ierr);
469   }
470   PetscFunctionReturn(0);
471 }
472 
473 #undef __FUNCT__
474 #define __FUNCT__ "VecCreateGhostWithArray"
475 /*@C
476    VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
477    the caller allocates the array space.
478 
479    Collective on MPI_Comm
480 
481    Input Parameters:
482 +  comm - the MPI communicator to use
483 .  n - local vector length
484 .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
485 .  nghost - number of local ghost points
486 .  ghosts - global indices of ghost points (or PETSC_NULL if not needed)
487 -  array - the space to store the vector values (as long as n + nghost)
488 
489    Output Parameter:
490 .  vv - the global vector representation (without ghost points as part of vector)
491 
492    Notes:
493    Use VecGhostGetLocalForm() to access the local, ghosted representation
494    of the vector.
495 
496    This also automatically sets the ISLocalToGlobalMapping() for this vector.
497 
498    Level: advanced
499 
500    Concepts: vectors^creating with array
501    Concepts: vectors^ghosted
502 
503 .seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
504           VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
505           VecCreateGhostBlock(), VecCreateGhostBlockWithArray()
506 
507 @*/
508 PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostWithArray(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
509 {
510   PetscErrorCode         ierr;
511   Vec_MPI                *w;
512   PetscScalar            *larray;
513   IS                     from,to;
514   ISLocalToGlobalMapping ltog;
515   PetscInt               rstart,i,*indices;
516 
517   PetscFunctionBegin;
518   *vv = 0;
519 
520   if (n == PETSC_DECIDE)      SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
521   if (nghost == PETSC_DECIDE) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
522   if (nghost < 0)             SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
523   ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
524   /* Create global representation */
525   ierr = VecCreate(comm,vv);CHKERRQ(ierr);
526   ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
527   ierr = VecCreate_MPI_Private(*vv,nghost,array);CHKERRQ(ierr);
528   w    = (Vec_MPI *)(*vv)->data;
529   /* Create local representation */
530   ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
531   ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
532   ierr = PetscLogObjectParent(*vv,w->localrep);CHKERRQ(ierr);
533   ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);
534 
535   /*
536        Create scatter context for scattering (updating) ghost values
537   */
538   ierr = ISCreateGeneral(comm,nghost,ghosts,&from);CHKERRQ(ierr);
539   ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
540   ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
541   ierr = PetscLogObjectParent(*vv,w->localupdate);CHKERRQ(ierr);
542   ierr = ISDestroy(to);CHKERRQ(ierr);
543   ierr = ISDestroy(from);CHKERRQ(ierr);
544 
545   /* set local to global mapping for ghosted vector */
546   ierr = PetscMalloc((n+nghost)*sizeof(PetscInt),&indices);CHKERRQ(ierr);
547   ierr = VecGetOwnershipRange(*vv,&rstart,PETSC_NULL);CHKERRQ(ierr);
548   for (i=0; i<n; i++) {
549     indices[i] = rstart + i;
550   }
551   for (i=0; i<nghost; i++) {
552     indices[n+i] = ghosts[i];
553   }
554   ierr = ISLocalToGlobalMappingCreate(comm,n+nghost,indices,&ltog);CHKERRQ(ierr);
555   ierr = PetscFree(indices);CHKERRQ(ierr);
556   ierr = VecSetLocalToGlobalMapping(*vv,ltog);CHKERRQ(ierr);
557   ierr = ISLocalToGlobalMappingDestroy(ltog);CHKERRQ(ierr);
558   ierr = PetscFree(indices);CHKERRQ(ierr);
559   PetscFunctionReturn(0);
560 }
561 
562 #undef __FUNCT__
563 #define __FUNCT__ "VecCreateGhost"
564 /*@
565    VecCreateGhost - Creates a parallel vector with ghost padding on each processor.
566 
567    Collective on MPI_Comm
568 
569    Input Parameters:
570 +  comm - the MPI communicator to use
571 .  n - local vector length
572 .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
573 .  nghost - number of local ghost points
574 -  ghosts - global indices of ghost points
575 
576    Output Parameter:
577 .  vv - the global vector representation (without ghost points as part of vector)
578 
579    Notes:
580    Use VecGhostGetLocalForm() to access the local, ghosted representation
581    of the vector.
582 
583    This also automatically sets the ISLocalToGlobalMapping() for this vector.
584 
585    Level: advanced
586 
587    Concepts: vectors^ghosted
588 
589 .seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
590           VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
591           VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
592           VecCreateGhostBlock(), VecCreateGhostBlockWithArray()
593 
594 @*/
595 PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhost(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
596 {
597   PetscErrorCode ierr;
598 
599   PetscFunctionBegin;
600   ierr = VecCreateGhostWithArray(comm,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
601   PetscFunctionReturn(0);
602 }
603 
604 #undef __FUNCT__
605 #define __FUNCT__ "VecDuplicate_MPI"
PetscErrorCode VecDuplicate_MPI(Vec win,Vec *v)
{
  PetscErrorCode ierr;
  Vec_MPI        *vw,*w = (Vec_MPI *)win->data;
  PetscScalar    *array;

  PetscFunctionBegin;
  /* Create a new vector with the same layout (including ghost padding). */
  ierr = VecCreate(((PetscObject)win)->comm,v);CHKERRQ(ierr);
  ierr = VecSetSizes(*v,win->map.n,win->map.N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*v,w->nghost,0);CHKERRQ(ierr);
  vw   = (Vec_MPI *)(*v)->data;
  /* Copy the parent's full operations table so any overrides are inherited. */
  ierr = PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps));CHKERRQ(ierr);

  /* save local representation of the parallel vector (and scatter) if it exists */
  if (w->localrep) {
    ierr = VecGetArray(*v,&array);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,win->map.n+w->nghost,array,&vw->localrep);CHKERRQ(ierr);
    ierr = PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps));CHKERRQ(ierr);
    ierr = VecRestoreArray(*v,&array);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(*v,vw->localrep);CHKERRQ(ierr);
    /* The ghost-update scatter is shared with the parent (reference counted),
       not duplicated. */
    vw->localupdate = w->localupdate;
    if (vw->localupdate) {
      ierr = PetscObjectReference((PetscObject)vw->localupdate);CHKERRQ(ierr);
    }
  }

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash = win->stash.donotstash;
  (*v)->stash.ignorenegidx = win->stash.ignorenegidx;

  /* Duplicate attached objects and composed function lists. */
  ierr = PetscOListDuplicate(((PetscObject)win)->olist,&((PetscObject)(*v))->olist);CHKERRQ(ierr);
  ierr = PetscFListDuplicate(((PetscObject)win)->qlist,&((PetscObject)(*v))->qlist);CHKERRQ(ierr);
  /* Local-to-global mappings are shared with the parent (reference counted). */
  if (win->mapping) {
    ierr = PetscObjectReference((PetscObject)win->mapping);CHKERRQ(ierr);
    (*v)->mapping = win->mapping;
  }
  if (win->bmapping) {
    ierr = PetscObjectReference((PetscObject)win->bmapping);CHKERRQ(ierr);
    (*v)->bmapping = win->bmapping;
  }
  (*v)->map.bs    = win->map.bs;
  (*v)->bstash.bs = win->bstash.bs;

  PetscFunctionReturn(0);
}
651 
652 /* ------------------------------------------------------------------------------------------*/
653 #undef __FUNCT__
654 #define __FUNCT__ "VecCreateGhostBlockWithArray"
655 /*@C
656    VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
657    the caller allocates the array space. Indices in the ghost region are based on blocks.
658 
659    Collective on MPI_Comm
660 
661    Input Parameters:
662 +  comm - the MPI communicator to use
663 .  bs - block size
664 .  n - local vector length
665 .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
666 .  nghost - number of local ghost blocks
667 .  ghosts - global indices of ghost blocks (or PETSC_NULL if not needed)
668 -  array - the space to store the vector values (as long as n + nghost*bs)
669 
670    Output Parameter:
671 .  vv - the global vector representation (without ghost points as part of vector)
672 
673    Notes:
674    Use VecGhostGetLocalForm() to access the local, ghosted representation
675    of the vector.
676 
677    n is the local vector size (total local size not the number of blocks) while nghost
678    is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
679    portion is bs*nghost
680 
681    Level: advanced
682 
683    Concepts: vectors^creating ghosted
684    Concepts: vectors^creating with array
685 
686 .seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
687           VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
688           VecCreateGhostWithArray(), VecCreateGhostBlocked()
689 
690 @*/
691 PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostBlockWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
692 {
693   PetscErrorCode ierr;
694   Vec_MPI        *w;
695   PetscScalar    *larray;
696   IS             from,to;
697 
698   PetscFunctionBegin;
699   *vv = 0;
700 
701   if (n == PETSC_DECIDE)      SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
702   if (nghost == PETSC_DECIDE) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
703   if (nghost < 0)             SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
704   ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
705   /* Create global representation */
706   ierr = VecCreate(comm,vv);CHKERRQ(ierr);
707   ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
708   ierr = VecCreate_MPI_Private(*vv,nghost*bs,array);CHKERRQ(ierr);
709   ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
710   w    = (Vec_MPI *)(*vv)->data;
711   /* Create local representation */
712   ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
713   ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,n+bs*nghost,larray,&w->localrep);CHKERRQ(ierr);
714   ierr = VecSetBlockSize(w->localrep,bs);CHKERRQ(ierr);
715   ierr = PetscLogObjectParent(*vv,w->localrep);CHKERRQ(ierr);
716   ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);
717 
718   /*
719        Create scatter context for scattering (updating) ghost values
720   */
721   ierr = ISCreateBlock(comm,bs,nghost,ghosts,&from);CHKERRQ(ierr);
722   ierr = ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);CHKERRQ(ierr);
723   ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
724   ierr = PetscLogObjectParent(*vv,w->localupdate);CHKERRQ(ierr);
725   ierr = ISDestroy(to);CHKERRQ(ierr);
726   ierr = ISDestroy(from);CHKERRQ(ierr);
727 
728   PetscFunctionReturn(0);
729 }
730 
731 #undef __FUNCT__
732 #define __FUNCT__ "VecCreateGhostBlock"
733 /*@
734    VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
735         The indicing of the ghost points is done with blocks.
736 
737    Collective on MPI_Comm
738 
739    Input Parameters:
740 +  comm - the MPI communicator to use
741 .  bs - the block size
742 .  n - local vector length
743 .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
744 .  nghost - number of local ghost blocks
745 -  ghosts - global indices of ghost blocks
746 
747    Output Parameter:
748 .  vv - the global vector representation (without ghost points as part of vector)
749 
750    Notes:
751    Use VecGhostGetLocalForm() to access the local, ghosted representation
752    of the vector.
753 
754    n is the local vector size (total local size not the number of blocks) while nghost
755    is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
756    portion is bs*nghost
757 
758    Level: advanced
759 
760    Concepts: vectors^ghosted
761 
762 .seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
763           VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
764           VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecCreateGhostBlockWithArray()
765 
766 @*/
767 PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostBlock(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
768 {
769   PetscErrorCode ierr;
770 
771   PetscFunctionBegin;
772   ierr = VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
773   PetscFunctionReturn(0);
774 }
775 
776 /*
777     These introduce a ghosted vector where the ghosting is determined by the call to
778   VecSetLocalToGlobalMapping()
779 */
780 
781 #undef __FUNCT__
782 #define __FUNCT__ "VecSetLocalToGlobalMapping_FETI"
783 PetscErrorCode VecSetLocalToGlobalMapping_FETI(Vec vv,ISLocalToGlobalMapping map)
784 {
785   PetscErrorCode ierr;
786   Vec_MPI        *v = (Vec_MPI *)vv->data;
787 
788   PetscFunctionBegin;
789   v->nghost = map->n - vv->map.n;
790 
791   /* we need to make longer the array space that was allocated when the vector was created */
792   ierr     = PetscFree(v->array_allocated);CHKERRQ(ierr);
793   ierr     = PetscMalloc(map->n*sizeof(PetscScalar),&v->array_allocated);CHKERRQ(ierr);
794   v->array = v->array_allocated;
795 
796   /* Create local representation */
797   ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,map->n,v->array,&v->localrep);CHKERRQ(ierr);
798   ierr = PetscLogObjectParent(vv,v->localrep);CHKERRQ(ierr);
799   PetscFunctionReturn(0);
800 }
801 
802 
803 #undef __FUNCT__
804 #define __FUNCT__ "VecSetValuesLocal_FETI"
805 PetscErrorCode VecSetValuesLocal_FETI(Vec vv,PetscInt n,const PetscInt *ix,const PetscScalar *values,InsertMode mode)
806 {
807   PetscErrorCode ierr;
808   Vec_MPI        *v = (Vec_MPI *)vv->data;
809 
810   PetscFunctionBegin;
811   ierr = VecSetValues(v->localrep,n,ix,values,mode);CHKERRQ(ierr);
812   PetscFunctionReturn(0);
813 }
814 
815 EXTERN_C_BEGIN
816 #undef __FUNCT__
817 #define __FUNCT__ "VecCreate_FETI"
818 PetscErrorCode PETSCVEC_DLLEXPORT VecCreate_FETI(Vec vv)
819 {
820   PetscErrorCode ierr;
821 
822   PetscFunctionBegin;
823   ierr = VecSetType(vv,VECMPI);CHKERRQ(ierr);
824 
825   /* overwrite the functions to handle setting values locally */
826   vv->ops->setlocaltoglobalmapping = VecSetLocalToGlobalMapping_FETI;
827   vv->ops->setvalueslocal          = VecSetValuesLocal_FETI;
828   vv->ops->assemblybegin           = 0;
829   vv->ops->assemblyend             = 0;
830   vv->ops->setvaluesblocked        = 0;
831   vv->ops->setvaluesblocked        = 0;
832 
833   PetscFunctionReturn(0);
834 }
835 EXTERN_C_END
836 
837 
838 
839 
840 
841 
842 
843 
844 
845